
tf13_keras_regression-hp-search-sklearn.ipynb 305 kB

  1. {
  2. "cells": [
  3. {
  4. "cell_type": "code",
  5. "execution_count": 1,
  6. "metadata": {},
  7. "outputs": [
  8. {
  9. "name": "stdout",
  10. "output_type": "stream",
  11. "text": [
  12. "2.2.0\n",
  13. "sys.version_info(major=3, minor=6, micro=9, releaselevel='final', serial=0)\n",
  14. "matplotlib 3.2.1\n",
  15. "numpy 1.18.5\n",
  16. "pandas 1.0.4\n",
  17. "sklearn 0.23.1\n",
  18. "tensorflow 2.2.0\n",
  19. "tensorflow.keras 2.3.0-tf\n"
  20. ]
  21. }
  22. ],
  23. "source": [
  24. "import matplotlib as mpl\n",
  25. "import matplotlib.pyplot as plt\n",
  26. "%matplotlib inline\n",
  27. "import numpy as np\n",
  28. "import sklearn\n",
  29. "import pandas as pd\n",
  30. "import os\n",
  31. "import sys\n",
  32. "import time\n",
  33. "import tensorflow as tf\n",
  34. "\n",
  35. "from tensorflow import keras\n",
  36. "\n",
  37. "print(tf.__version__)\n",
  38. "print(sys.version_info)\n",
  39. "for module in mpl, np, pd, sklearn, tf, keras:\n",
  40. " print(module.__name__, module.__version__)"
  41. ]
  42. },
  43. {
  44. "cell_type": "code",
  45. "execution_count": 2,
  46. "metadata": {},
  47. "outputs": [
  48. {
  49. "name": "stdout",
  50. "output_type": "stream",
  51. "text": [
  52. ".. _california_housing_dataset:\n",
  53. "\n",
  54. "California Housing dataset\n",
  55. "--------------------------\n",
  56. "\n",
  57. "**Data Set Characteristics:**\n",
  58. "\n",
  59. " :Number of Instances: 20640\n",
  60. "\n",
  61. " :Number of Attributes: 8 numeric, predictive attributes and the target\n",
  62. "\n",
  63. " :Attribute Information:\n",
  64. " - MedInc median income in block\n",
  65. " - HouseAge median house age in block\n",
  66. " - AveRooms average number of rooms\n",
  67. " - AveBedrms average number of bedrooms\n",
  68. " - Population block population\n",
  69. " - AveOccup average house occupancy\n",
  70. " - Latitude house block latitude\n",
  71. " - Longitude house block longitude\n",
  72. "\n",
  73. " :Missing Attribute Values: None\n",
  74. "\n",
  75. "This dataset was obtained from the StatLib repository.\n",
  76. "http://lib.stat.cmu.edu/datasets/\n",
  77. "\n",
  78. "The target variable is the median house value for California districts.\n",
  79. "\n",
  80. "This dataset was derived from the 1990 U.S. census, using one row per census\n",
  81. "block group. A block group is the smallest geographical unit for which the U.S.\n",
  82. "Census Bureau publishes sample data (a block group typically has a population\n",
  83. "of 600 to 3,000 people).\n",
  84. "\n",
  85. "It can be downloaded/loaded using the\n",
  86. ":func:`sklearn.datasets.fetch_california_housing` function.\n",
  87. "\n",
  88. ".. topic:: References\n",
  89. "\n",
  90. " - Pace, R. Kelley and Ronald Barry, Sparse Spatial Autoregressions,\n",
  91. " Statistics and Probability Letters, 33 (1997) 291-297\n",
  92. "\n",
  93. "(20640, 8)\n",
  94. "(20640,)\n"
  95. ]
  96. }
  97. ],
  98. "source": [
  99. "from sklearn.datasets import fetch_california_housing\n",
  100. "\n",
  101. "housing = fetch_california_housing()\n",
  102. "print(housing.DESCR)\n",
  103. "print(housing.data.shape)\n",
  104. "print(housing.target.shape)"
  105. ]
  106. },
  107. {
  108. "cell_type": "code",
  109. "execution_count": 3,
  110. "metadata": {},
  111. "outputs": [
  112. {
  113. "name": "stdout",
  114. "output_type": "stream",
  115. "text": [
  116. "(11610, 8) (11610,)\n",
  117. "(3870, 8) (3870,)\n",
  118. "(5160, 8) (5160,)\n"
  119. ]
  120. }
  121. ],
  122. "source": [
  123. "from sklearn.model_selection import train_test_split\n",
  124. "\n",
  125. "x_train_all, x_test, y_train_all, y_test = train_test_split(\n",
  126. " housing.data, housing.target, random_state = 7)\n",
  127. "x_train, x_valid, y_train, y_valid = train_test_split(\n",
  128. " x_train_all, y_train_all, random_state = 11)\n",
  129. "print(x_train.shape, y_train.shape)\n",
  130. "print(x_valid.shape, y_valid.shape)\n",
  131. "print(x_test.shape, y_test.shape)\n"
  132. ]
  133. },
  134. {
  135. "cell_type": "code",
  136. "execution_count": 4,
  137. "metadata": {},
  138. "outputs": [],
  139. "source": [
  140. "from sklearn.preprocessing import StandardScaler\n",
  141. "\n",
  142. "scaler = StandardScaler()\n",
  143. "x_train_scaled = scaler.fit_transform(x_train)\n",
  144. "x_valid_scaled = scaler.transform(x_valid)\n",
  145. "x_test_scaled = scaler.transform(x_test)"
  146. ]
  147. },
  148. {
  149. "cell_type": "code",
  150. "execution_count": 5,
  151. "metadata": {},
  152. "outputs": [
  153. {
  154. "name": "stdout",
  155. "output_type": "stream",
  156. "text": [
  157. "Epoch 1/10\n",
  158. "363/363 [==============================] - 1s 2ms/step - loss: 1.3733 - val_loss: 0.7411\n",
  159. "Epoch 2/10\n",
  160. "363/363 [==============================] - 1s 3ms/step - loss: 0.6284 - val_loss: 0.6279\n",
  161. "Epoch 3/10\n",
  162. "363/363 [==============================] - 1s 3ms/step - loss: 0.5568 - val_loss: 0.5713\n",
  163. "Epoch 4/10\n",
  164. "363/363 [==============================] - 1s 2ms/step - loss: 0.5257 - val_loss: 0.5383\n",
  165. "Epoch 5/10\n",
  166. "363/363 [==============================] - 1s 2ms/step - loss: 0.4986 - val_loss: 0.5133\n",
  167. "Epoch 6/10\n",
  168. "363/363 [==============================] - 1s 2ms/step - loss: 0.4869 - val_loss: 0.5010\n",
  169. "Epoch 7/10\n",
  170. "363/363 [==============================] - 1s 2ms/step - loss: 0.4706 - val_loss: 0.4821\n",
  171. "Epoch 8/10\n",
  172. "363/363 [==============================] - 1s 2ms/step - loss: 0.4531 - val_loss: 0.4714\n",
  173. "Epoch 9/10\n",
  174. "363/363 [==============================] - 1s 2ms/step - loss: 0.4455 - val_loss: 0.4670\n",
  175. "Epoch 10/10\n",
  176. "363/363 [==============================] - 1s 3ms/step - loss: 0.4368 - val_loss: 0.4615\n"
  177. ]
  178. }
  179. ],
  180. "source": [
  181. "# RandomizedSearchCV\n",
  182. "# 1. 因为是sklearn的接口,转化为sklearn的model\n",
  183. "# 2. 定义参数集合\n",
  184. "# 3. 搜索参数\n",
  185. "\n",
  186. "def build_model(hidden_layers = 1,\n",
  187. " layer_size = 30,\n",
  188. " learning_rate = 3e-3):\n",
  189. " model = keras.models.Sequential()\n",
  190. " #因为不知道第一个输入的shape是多大的,因此我们需要单独从for循环里拿出来,for循环里的是输出再次作为输入\n",
  191. " model.add(keras.layers.Dense(layer_size, activation='relu',\n",
  192. " input_shape=x_train.shape[1:]))\n",
  193. " for _ in range(hidden_layers - 1):\n",
  194. " model.add(keras.layers.Dense(layer_size,\n",
  195. " activation = 'relu'))\n",
  196. " model.add(keras.layers.Dense(1))\n",
  197. " optimizer = keras.optimizers.SGD(learning_rate)\n",
  198. " model.compile(loss = 'mse', optimizer = optimizer)\n",
  199. " return model\n",
  200. "\n",
  201. "#KerasRegressor返回一个sk的model,build_fn是一个回调函数\n",
  202. "sklearn_model = tf.keras.wrappers.scikit_learn.KerasRegressor(\n",
  203. " build_fn = build_model)\n",
  204. "callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]\n",
  205. "#下面只是先对sk封装tf模型的一个测试\n",
  206. "history = sklearn_model.fit(x_train_scaled, y_train,\n",
  207. " epochs = 10,\n",
  208. " validation_data = (x_valid_scaled, y_valid),\n",
  209. " callbacks = callbacks)"
  210. ]
  211. },
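The comment block at the top of the cell above lists three steps, but the cell itself only carries out step 1: wrapping the Keras model as a sklearn estimator and giving it a quick test fit. Steps 2 and 3, defining the parameter space and running the randomized search, belong to a later cell whose cross-validated training logs appear further down. A minimal sketch of that search setup, assuming the parameter names mirror build_model's keyword arguments and using illustrative ranges (not necessarily the notebook's exact values), would look roughly like this:

from scipy.stats import reciprocal
from sklearn.model_selection import RandomizedSearchCV

# Step 2: parameter distributions; the keys must match build_model's keyword arguments
param_distribution = {
    "hidden_layers": [1, 2, 3, 4],
    "layer_size": np.arange(1, 100),
    "learning_rate": reciprocal(1e-4, 1e-2),  # log-uniform sampling of the learning rate
}

# Step 3: randomized search over the sklearn-wrapped Keras model
# (cv defaults to 5-fold in sklearn 0.23, which is consistent with the 73-step evaluations in the logs below)
random_search_cv = RandomizedSearchCV(sklearn_model,
                                      param_distribution,
                                      n_iter=10)
# Extra keyword arguments are forwarded to the underlying Keras fit for every candidate
random_search_cv.fit(x_train_scaled, y_train,
                     epochs=5,
                     validation_data=(x_valid_scaled, y_valid),
                     callbacks=callbacks)

Each sampled configuration is trained for a few epochs on each cross-validation fold, which is exactly the pattern of repeated short runs visible in the output of the search cell further below.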
  212. {
  213. "cell_type": "code",
  214. "execution_count": 6,
  215. "metadata": {},
  216. "outputs": [
  217. {
  218. "data": {
  219. "image/png": "iVBORw0KGgoAAAANSUhEUgAAAeMAAAEzCAYAAAACSWsXAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deXgc1Z3u8e/pTa1dsmzJtmTJBhuMsYwtzA5G7FsCyRAGCDBAAs5kJTO5TJxkLpObm5lM4sxkcp94QpgECITEeAhJHHBCmMRmNcbG+4IXjBd5kzftai2tc/+oltRa3ZJaXXLr/TxPP13Vfbr6p/Pw8Pqcqj5lrLWIiIiIezxuFyAiIjLaKYxFRERcpjAWERFxmcJYRETEZQpjERERlymMRUREXHbKMDbGPGmMqTTGbO7jfWOM+X/GmF3GmI3GmLL4lykiIpK8YhkZPw3c2M/7NwHTIo/5wI+HXpaIiMjoccowtta+Dpzop8ltwDPW8Q6QY4yZEK8CRUREkl08zhkXAvuj9isir4mIiEgMfIn8MmPMfJypbFJTU8+fNGlS3I7d1taGxxP/69GONFha2yyFGbrWrd1w9bV0pX5ODPVzYqifYceOHcesteN6ey8eYXwAiE7VoshrPVhrnwCeAJg7d65ds2ZNHL7esWLFCsrLy+N2vHaLlu9i4SvbWfXYdeSkBeJ+/NPRcPW1dKV+Tgz1c2Kon8EYs7ev9+Lxz5SlwN9Erqq+GKi21h6Kw3FHhLLiXADW7atyuRIREUlWpxwZG2N+BZQDY40xFcA/AX4Aa+3jwDLgZmAX0AA8OFzFuuG8Sdl4PYa1+05y1fR8t8sREZEkdMowttbefYr3LfD5uFU0wqQFfEwfn8nafSfdLkVERJJUQi/gOl2VFefy4toKwm0Wr8e4XY6IiCtaWlqoqKggFAoN+LPZ2dls27ZtGKoaeYLBIEVFRfj9/pg/ozCOQVlJDs++s5cdR2o5Z0KW2+WIiLiioqKCzMxMJk+ejDEDG5jU1taSmZk5TJWNHNZajh8/TkVFBVOmTIn5c6P7OvMYtV/EpalqERnNQqEQeXl5Aw7i0cQYQ15e3oBnDxTGMSgek0ZeeoC1e3VFtYiMbgriUxtMHymMY2CMYU5xLus0MhYRcVVGRobbJQwLhXGMykpy2H2snpP1zW6XIiIiSUZhHKOOxT/2a3QsIuI2ay2PPvooM2fOpLS0lOeffx6AQ4cOMW/ePGbPns3MmTN54403CIfDPPDAAx1tf/CDH7hcfU+6mjpGs4oii3/sreLq6QVulyMiMqq9+OKLrF+/ng0bNnDs2DEuuOAC5s2bxy9/+UtuuOEGvvGNbxAOh2loaGD9+vUcOHCAzZs3A1BVNfKu/1EYxygt4OOcCVr8Q0QE4P/8fgtbD9bE3D4cDuP1evttM2NiFv/00XNjOt6bb77J3XffjdfrpaCggCuvvJLVq1dzwQUX8KlPfYqWlhY+9rGPMXv2bM444wx2797NF7/4RW655Rauv/76mOtOFE1TD0BZcS4b9lcRbrNulyIiIr2YN28er7/+OoWFhTzwwAM888wz5ObmsmHDBsrLy3n88cd56KGH3C6zB42MB6CsOJdnVu5l++FaZkzU4h8iMnrFOoJtF+9FP6644gp+8pOfcP/993PixAlef/11Fi5cyN69eykqKuLhhx+mqamJtWvXcvPNNxMIBLj99ts5++yzuffee+NWR7wojAcgevEPhbGIiHs+/vGPs3LlSs477zyMMXzve99j/Pjx/PznP2fhwoX4/X4yMjJ45plnOHDgAA8++CBtbW0AfOc733G5+p4UxgMwaUwqYzMCrN13knsvLnG7HBGRUaeurg5w1n9YuHAhCxcu7PL+/fffz/3339/jc2vXrk1IfYOlc8YD0Ln4x8i7Ek9ERE5fCuMBKivO5cNj9ZzQ4h8iIhInCuMBKivOAdDSmCIiEjcK4wGaVZSDz2P0e2MREYkbhfEApQa8nDMhS3dwEhGRuFEYD0JZcQ4bKqpoDbe5XYqIiCQBhfEglJXk0tAcZvuRWrdLERGRJKAwHoTOxT80VS0iMpL1d//jPXv2MHPmzARW0zeF8SAU5aYyNiOFdXt1EZeIiAydwngQjDGUFefoimoRkQRbsGABixYt6tj/5je/ybe//W2uueYaysrKKC0t5Xe/+92AjxsKhXjwwQcpLS1lzpw5LF++HIAtW7Zw4YUXMnv2bGbNmsXOnTupr6/nlltu4bzzzmPmzJkd91IeCi2HOUhlJbn8aesRjtc1kZeR4nY5IiKJ9YcFcHhTzM1Tw63gPUXkjC+Fm/613yZ33nknX/7yl/n85z8PwJIlS3jllVf40pe+RFZWFseOHePiiy/m1ltvxRgTc32LFi3CGMOmTZt4//33uf7669mxYwePP/44jzzyCPfccw/Nzc2Ew2GWLVvGxIkTefnllwGorq6O+Xv6opHxILWfN9bSmCIiiTNnzhwqKys5ePAgGzZsIDc3l/Hjx/P1r3+dWbNmce2113LgwAGOHDkyoOO++eabHXdzmj59OiUlJezYsYNLLrmEf/mXf+G73/0ue/fuJTU1ldLSUl599VW++tWv8sYbb5CdnT3kv0sj40GaVZTdsfjHtTMK3C5HRCSxTjGC7a4xjrdQvOOOO3jhhRc4fPgwd955J8899xxHjx7lvffew+/3M3nyZEKhUFy+65Of/CQXXXQRL7/8MjfffDM/+clPuPrqq1m7di3Lli3jH//xH7nmmmt47LHHhvQ9CuNBCvq9zJiYpfPGIiIJduedd/Lwww9z7NgxXnvtNZYsWUJ+fj5+v5/ly5ezd+/eAR/ziiuu4LnnnuPqq69mx44d7Nu3j7PPPpvdu3dzxhln8KUvfYl9+/axceNGpk+fzpgxY7j33nvJycnhpz/96ZD/JoXxEJQV5/L86v20htvweTXjLyKSCOeeey61tbUUFhYyYcIE7rnnHj760Y9SWlrK3LlzmT59+oCP+bnPfY7PfvazlJaW4vP5ePrpp0lJSWHJkiU8++yz+P3+junw1atX8+ijj+LxePD7/fz4xz8e8t+kMB6COcU5PP32Ht4/XMvMwqGfMxARkdhs2tR58djYsWNZuXJlr+3a73/cm8mTJ7N582YAgsEgTz31VI82CxYsYMGCBV1eu+GGG7jhhhsGU3afNJwbgs6LuDRVLSIig6eR8RAU5aYyLjOFtfuquO8St6sREZHebNq0ifvuu6/LaykpKaxatcqlinpSGA+BFv8QERn5SktLWb9+vdtl9EvT1ENUVpzL3uMNHKtrcrsUEZFhZ611u4QRbzB9pDAeorISLf4hIqNDMBjk+PHjCuR+WGs5fvw4wWBwQJ/TNPUQlRZ2Lv5xnRb/EJEkVlRUREVFBUePHh3wZ0Oh0IAD6nQVDAYpKioa0GcUxkMU9Hs5d2IWa3UHJxFJcn6/nylTpgzqsytWrGDOnDlxrih5aJo6DuYU57KxoprWcJvbpYiIyGlIYRwHZSW5NLaEef
9wrduliIjIaUhhHAdlxTkA+omTiIgMisI4DgpzUsnPTNF5YxERGRSFcRw4i3/kslY/bxIRkUFQGMdJWUkO+05o8Q8RERk4hXGctN80QlPVIiIyUArjOJlZmI3fazRVLSIiA6YwjpOg38uMidm6olpERAZMYRxHZcU5bKyookWLf4iIyADEFMbGmBuNMduNMbuMMQt6eb/YGLPcGLPOGLPRGHNz/Esd+cqKcwm1tPH+IS3+ISIisTtlGBtjvMAi4CZgBnC3MWZGt2b/CCyx1s4B7gL+M96Fng7a7+CkqWoRERmIWEbGFwK7rLW7rbXNwGLgtm5tLJAV2c4GDsavxNPHxOwgBVkpCmMRERkQc6r7UhpjPgHcaK19KLJ/H3CRtfYLUW0mAH8CcoF04Fpr7Xu9HGs+MB+goKDg/MWLF8fr76Curo6MjIy4HW+wfrQuxN6aNhZemeZ2KcNmpPR1slM/J4b6OTHUz3DVVVe9Z62d29t78bqF4t3A09bafzPGXAI8a4yZaa3tciWTtfYJ4AmAuXPn2vLy8jh9vXN7rngeb7B2enbzz8u2ce75lzAuM8XtcobFSOnrZKd+Tgz1c2Kon/sXyzT1AWBS1H5R5LVonwaWAFhrVwJBYGw8CjzdlJXophEiIjIwsYTxamCaMWaKMSaAc4HW0m5t9gHXABhjzsEJ46PxLPR0ce7E9sU/FMYiIhKbU4axtbYV+ALwCrAN56rpLcaYbxljbo00+wrwsDFmA/Ar4AF7qpPRSSro93LuxGzW7dVKXCIiEpuYzhlba5cBy7q99ljU9lbgsviWdvoqK87ll+/upSXcht+rdVVERKR/SophUFaSQ6iljW2HatwuRURETgMK42GgOziJiMhAKIyHwcScVMZnBXUHJxERiYnCeJiUleToimoREYmJwniYlBXnUnGykcrakNuliIjICKcwHiZzOs4ba6paRET6pzAeJjMLswh4PazTVLWIiJyCwniYpPi8nFuYpfPGIiJySgrjYVRWnMvGimqaW9tO3VhEREat5AjjxpMUVrwMbWG3K+mirDiXplYt/iEiIv1LjjDe8DzTdj0BT38ETnzodjUddAcnERGJRXKE8UWfYdv0L8ORLfDjy2DNkzAC7lMxITuVidla/ENERPqXHGFsDEfGXwWfexsmXQgv/R384naoOeh2ZcwpydWymCIi0q/kCON22UVw32/g5u/DvpXwnxfDhuddHSWXFedyoKqRyhot/iEiIr1LrjAGMAYufBj+9k0Ydw78Zj4suQ/qj7lSTlmxzhuLiEj/ki+M2+WdCQ8ug+u+BTtegUUXwbaXEl7GuROzCfg8Om8sIiJ9St4wBvB44bJHYP5rkDURnr8HfvO30Ji4YAz4PJQWZuu8sYiI9Cm5w7hdwQx46M8w7x9g4xL48aXwwV8S9vVlxTlsPKDFP0REpHejI4wBfAG4+hvw0KsQSIdnPw4vfwWa64f9q8uKc2lubWOrFv8QEZFejJ4wbld4PnzmdbjkC7D6Z87vkve9M6xfWVbi3MHpPU1Vi4hIL0ZfGAP4U+GGf4YHXgIbhidvhFcfg5bh+flRQVaQwpxUXVEtIiK9Gp1h3G7y5fDZt+H8++GtH8IT5XBow7B81ZziHNZpZCwiIr0Y3WEMkJIJH/0h3PMCNJ6E/7oaXvsehFvj+jVlxbkcrA5xuFqLf4iISFcK43bTroPPrYQZH4Pl/ww/uw6Obo/b4dvPG2uqWkREulMYR0sbA5/4GdzxNJzcA49fAW//CNqG/pOkGROySPF59HtjERHpQWHcm3M/Dp97B868Gv70Dfj5R5xwHoKOxT80MhYRkW4Uxn3JLIC7fwW3/Scc3uT8BOq9p4d004myklw2H6ihqTUcvzpFROS0pzDujzEw5x7niuvCMvj9I/DcHVBzaFCHKyvOoTncxpaDWvxDREQ6KYxjkTMJ7vsd3LQQ9rzp3Jpx438PeJRcVhy5iEvnjUVEJIrCOFYeD1w037k149hp8OJD8N/3Q/3xmA+RH1n8Y53u4CQiIlEUxgM1dip86hW45p/g/WXOKHn7H2L+eFlJri7iEhGRLhTGg+HxwhV/D/NXQEYB/Oou+O3nIFR9yo+WFedwqDrEoerGYS9TRERODwrjoRg/Ex7+C1zxv2DDr5wrrnev6PcjneeNNVUtIiIOhfFQ+QJwzf+GT78KvhR45jZY9ig0N/Ta/Jz2xT80VS0iIhEK43gpmgufeQMu+iy8+wQ8fjnsf7dHs4DPw6wiLf4hIiKdFMbxFEiDm/4V7v89hFvgyRvgf74JrU1dmpUV57JFi3+IiEiEwng4TJkHn30LZt8Db/7AuRPU4U0db88pzqU53MbmA1r8Q0REFMbDJ5gFt/0I7n4e6o/CE1fB6wsh3EpZSQ4A6zRVLSIiKIyH39k3OjedOOej8Jdvw5PXk9+0n6LcVJ03FhERQGGcGGlj4I6n4BNPwond8Pjl/F3mn1m354TblYmIyAigME6kmbc7o+QpV3J75Y/4QdP/5o3fP01TqPefQYmIyOigME60zPHwyeepuf4HTPce4Ir3HqHlX89k66JPUrXxZecqbBERGVV8bhcwKhlD1qWfou2Ce9n01u+pXvM8syqXk/Xiy9T/LovQ1FvIu+gumHyFs/SmiIgkNYWxizz+AKXlt0P57Xx45AQvvfLfZH2wlPL3fwPbf0VTSh6+0o/jLb0dJl3s3DlKRESSjsJ4hJhSMIYpf/MZakOf4sV3d7HrrRe5qGEF16x5Bu+anxLOmIB35l/BzL+CwvPBGLdLFhGROIkpjI0xNwI/BLzAT621/9pLm78GvglYYIO19pNxrHPUyAz6+Zt559B2+ddZseMhvvjGVlI/fJVba96hfNUT+N5ZBDnFcG4kmMfPUjCLiJzmThnGxhgvsAi4DqgAVhtjllprt0a1mQZ8DbjMWnvSGJM/XAWPFh6P4erpBVw9vYCdR+by85V7+Pp7O5jXtop7Gt/jvLd/hOet/4C8qZFgvh3yp7tdtoiIDEIsI+MLgV3W2t0AxpjFwG3A1qg2DwOLrLUnAay1lfEudDSbVpDJtz9WyqPXT2fJmjl8ceUe6huPcFfGBu4Jr6Hwje9jXv8e5M/oHDHnnel22SIiEqNYwrgQ2B+1XwFc1K3NWQDGmLdwprK/aa39Y1wqlA7ZaX4enncGn7p8Cv+z7QhPvTWZH+++giJ/Df8waTvXtr1F2vJvw/Jvw4TzOoM5p9jt0kVEpB/GWtt/A2M+AdxorX0osn8fcJG19gtRbV4CWoC/BoqA14FSa21Vt2PNB+YDFBQUnL948eK4/SF1dXVkZGTE7Xini/21bby6t4WVB1tpaYN5uSf4dNZqZje8TXbtTgCqs86mMv8Kjo67lOaUvCF/52jt60RTPyeG+jkx1M9w1VVXvWetndvbe7GE8SU4I90bIvtfA7DWfieqzePAKmvtU5H9PwMLrLWr+zru3Llz7Zo1awb6t/RpxYoVlJeXx+14p5sT9c0sXr2PZ1fu5VB1iOIxaXx+tpfb/O8S3P7byF2jDJRc6oyWz7kNMsYN6
rtGe18nivo5MdTPiaF+BmNMn2Ecyw9XVwPTjDFTjDEB4C5gabc2vwXKI182FmfaevegK5YBG5Me4HPlU3n9H65i0SfLyM9M4at/qaVs+Uwem/A4+z75GpR/DeqPwctfgX87C575GKx9Bhq0RraIiJtOec7YWttqjPkC8ArO+eAnrbVbjDHfAtZYa5dG3rveGLMVCAOPWmuPD2fh0ju/18MtsyZwy6wJbKqo5qm3P2Txu/t5ZmUbV551FQ9e+wDzso/i2fob2PIiLP0ivPT3cOZVzhXZZ9/s3P5RREQSJqbfGVtrlwHLur32WNS2Bf4+8pARorQom3//69l87aZz+OWqffxi1V4eeHoNZ4xN5/5L7+P2+QvIOLEZNr8IW34Dv/kMeFNg2nXOVPZZN0Ig3e0/Q0Qk6WkFrlFgXGYKj1w7jc+Wn8kfNh/iybf28E9Lt/D9V7Zzx9xJ3H/pAkqu+xZUrO4M5vdfAn+aE8gz/wqmXgf+oNt/iohIUlIYjyIBn4fbZhdy2+xC1u07yVNv7eGZlXt46u0PuWZ6Pg9edgaX3vgdzA3/DPtWOsG89bfOdHYgE6bfAud+HF9Lq9t/iohIUlEYj1JzinOZU5zLN245h1+8s5dfrtrH/2xbxVkFGTxw6RQ+PucSUidfDjd9D/a87gTztqWwcTGXA2ydAhNnO79nnjDb2U7NdfvPEhE5LSmMR7mCrCBfuf5sPn/VVH6/4SBPvbWHr/9mE9/94/vcdcEk7rukhKIzr4Yzr4Zb/h32vsXuN3/NGcFqOPCeM6XdLqckEtCzO5/Txrj3x4mInCYUxgJA0O/ljrmT+MT5RazZe5Kn3vqQn775If/1xm6unzGeBy+bzIVTxmDOvIp9+w1ntP9esOEEHFoPB9fDoQ3O9tbfdR44p7hrOE+co4AWEelGYSxdGGO4YPIYLpg8hgNVjTy7ci+LV+/jj1sOM2NCFvdfWoKnoQ1rLcYYJ1jbR87tGk86wXxwfWdQb4v6aXr2JGd6e+JsmDDHeU4fm/g/VkRkhFAYS58Kc1JZcNN0HrlmGr9df4Cn39rDV3+9CYBvvfsnZkzIYsbErI7nafmZBHwe59zxGeXOo11jVefIuT2k33+p8/2sop5T3INcIUxE5HSjMJZTSg14ufvCYu66YBKbD9Tw6+Xv0po5nq0Ha1j87n4aW8IA+L2GqfmZXUN6QhbZaX5IzYEzrnQe7Rqr4PDGrlPcXQK6sNsU92zI0N05RST5KIwlZsYYSouyOT7JT3l5KQDhNsve4/VsPVTD1oM1bD1Uwxs7j/LrtRUdnyvMSe0ygp4xIYui3FRMag5Mmec82oVqogI6MorevgyIrKGeOaFnQGeOT2AviIjEn8JYhsTrMZwxLoMzxmXwkVkTO14/WtvEtkM1XUL6z9uO0NaeqUFf79PcwSyYfLnzaNdUC4c2dp3i3vFHOgI6Y3zPKe6sCYnrBBGRIVIYy7AYl5nCuMxxzDur87xvY3OY7UdqI+FcPYBp7kyYfJnzaNdUC4c3dw3onX8C2+a8n1HghHL7hWLjpjs/vfLqP3kRGXn0fyZJmNSAl9mTcpg9KafjtcFPc2dgSi6Bkks6v6C53rlVZPQU965XOwPa44cxUyBvqvMYOy2yPc25mtuYRHWFiEgXCmNxVdynuYsvhuKLO7+guR6ObIFjO+H4Tji+C47tgl3/A+HmznbB7M5gzpsKYyOBPeZMCKQlqDdEZLRSGMuIFNdp7kkXwqQLu35BWxiq9zvB3BHSO2HPm7Bxcde22ZMg78xuQT0NsovA4x3urhCRUUBhLKeNoUxzn1WQwbSCTKbmZ3Q8snInQ+5kmHZt1y9qrofjHzgB3f44thM2Pg9NNZ3tvCmRkI4Edce091StMiYiA6IwltNarNPcOyvreOuD4zS3tnW0GZ8VZFpBBmeOy2BaQQbT8jOZlp9Bbno6TJjlPKJZC/VHe055V74P2/8AbVF3s0odEwnnaU5Yt2+PmQK+lOHuFhE5zSiMJSn1Ns0dbrPsP9HArso6dlbWsbOyll2VdSxZs5+G5nBHu7EZgR4BPTU/g3GZKZiMfGfhkegruwHCLXByb2QkHX1u+lVY/4vOdsbjrNfdfn66/dx03jTImqiLyERGKYWxjBpej2Hy2HQmj03n2hkFHa+3tVkO1YTYecQJ551H6th1tI7frT9IbahztJsV9DGtoDOcp+Y7U98Ts4MYr98J1rFTgRu7fnGoJhLSHzhBfSwS1ntXQkt9Zzt/Wpdz0xMP1cDmE86Ud+qYzmddUCaSdBTGMup5PIbCnFQKc1IpP7tzuU1rLUdrm9hZWRcZTdey80gdr249wuLV+zvapQe8TM3P4Mz8zpH0tIIMinLT8HoMBLOgsMx5RLMWag5GjaY/cIL64FrY+lvOsm2w8/GeBfuCUeGc2zOse3sO5oDHM1xdKCJDpDAW6YMxhvysIPlZQS6b2vWuUifqm7sE9K7KOt7adYwX1x7oaJPi83DGuAwnnCMBPTU/g5K8dPxejzMlnV3oPKLX7AZobebtP7/EpbOnQ+MJ51aVXZ5Pdu5XboOG487dsmwbvTPO+uC9hnVu3yHuT41zr4pIbxTGIoMwJj3AhVPGcOGUrldN14Ra2FVZx64jneek1+47ydINBzva+DyGKWPTI+HsXOE9LT+DKWPTCfojP5XyBWhOGQMFM2Ivqq0NmqojgX2ylwCPeq496Pz+uvEEtDT0fUx/2qkDO3qUnjkeAukD6UoRQWEsEldZQT9lxbmUFed2eb2huZUPKuvZddQZSe+srGPboVr+uPlwx0ImHgMleemcOc4ZQddWtlC17gDZaX5yUv3kpAXISfWTlep3pr+780RuX5ma2/O9/rSE+g7t7qF+eFPn6+1rg3cXzHF+g51d5Nx5K7vQuUVmdqGzn1UIvsDAahRJcgpjkQRIC/goLcqmtCi7y+uhljB7jtd3BPSuyGj6tR2VtIQtz21b3+vxsoI+J5zT/GRHBXX3/dx0P9mpne383l7OG/uD4J/oXM0dq7Y2CFV1C+vjUHsIqg9AzQHnef+qSHBHM84V6V2CuqhraGcUaEEVGVUUxiIuCvq9TB+fxfTxWV1ebw238Yc/v8a5cy6gqrGFqoZmqhpanEdjC9UNzZHXnf39Jxqc1xtbsH0MWAEyUnyRsI48UgNRI+/u+50h3jF93s7jcaam08Y4V4D3p7k+EtAVUUFd4TyOboddf+l6VTmAx+fcLrO30XV2kbOdNkY/BZOkoTAWGYF8Xg+ZAWdBk4Foa7PUhlqpamzuCOqqhmaq24O7oYWqxmaqI++9X13T8V5rW98pHvR7yEmNHon7O/bbQzsn1U9eRgr5mSnkZ6WQFoj87yWQDuPOch69sdYZZXcE9f6uo+sDa2Db0q5riQP4Up3RfHahs2Rpb1Piwazev1NkhFEYiyQRj8eQneYnO81PSV7sn7PWUt8c7hiBVze2cDJqu2Nk3thCdUMLHx6rp6qhiqqGFprDvV/BnZHi6wjm/Mxg1+2o58wUH6b9XPf4mb0X2NbmrH7WfXTd/vzBcqg73PNq
8pTszmCOHlVHn78WGQEUxiKCMYaMFB8ZKT6KBnD9l7WWUEsbVY3NnKxv4VhdE5W1TVTWhqisaeJobRNHakKs319FZW2IUEvP4A76PR1hXZAVZFy3AC/Icp5zMvIxmQVQeH7vxYRbnXPW3YO6fYr84DpoONbjY5d702FdHqRmOxefBSPPqVHbwezIfvR2tn76JXGjMBaRQTPGkBrwkhpIZUJ2/8FkraW2qZXKms6w7nx2trcdruG1HU3UNbX2+HzA64ksc5pCQS+jbef1fPKKivBE30YzWkuoW1gf4PCO9RSNzYTGKghVw4ndznNjVc9z2d15U7qGc/ew7m8/JUsXqUkHhbGIJIQxhqygn6ygn6n5/Z8Lb2hu7RLSRyLBfTTy2ofH6ln14QmqGlp6fNbrMYzNCESFdbcp8sw88vMmMrYkBb/Xwy67gqLy8t4LCbc4wdwezqHIoz24Q1VR71U7I+8TH+5jS44AAA00SURBVHTu23Dvx3V6xAnkYHaMo/Ko/UC6c8MRb0AXsSUJhbGIjDhpAR+Tx/qYPLb/BURCLWGO1joBfbQ2RGVkWrw9yA9Wh9hQUcXx+uYeV5kbA3npAdJMK1P3rKYgK8j4rCDjs52p8fHZzn52Wh4mfWzvBfTHWmiu6xrW/QV5qMoZlbfvn2pU3s7j7wxmb8D5Dbc3pdt2e5vo7fb2vXzWF2nXYzvSrst292NEtjXqHxCFsYictoJ+L5PGpDFpTP83z2gJtznns2u6ntOurA2x9cODHK4OsWG/E9o9v8NDQVYwKqyDPYI7PzNIwNftN9zGQEqm88guGvgf19rs3D+7I6xPRk2fN0Brk3OFebjZaRtu6rbdEmkT2W6od57DTZHX27cjxwg3DbzG/hhvlyC/uBXYlO2sre4POs++oNPGn+o8+1L72Y/6TJfP97LvPf2i7fSrWERkgPxeDxOyez+vvWLFCcrLrwCgqTVMZY0zuj5cE+JwdSiy3cSRaudCtMNbQl3ui91ubEagI6QLIqPq7ttZqT5MrNPKvgD4xsJgRuWDYW0koNsDvqmf7V5Cvb9/CLQ2UVWxl/Fjc5zXWxud54bjXfdbGqE15Dz3tcJbLDy+gYV3X/8ACGTA7Lvj1sX9URiLiESk+E490rbWUtXQ4oR1TYgj1ZHnSHgfrA6xdt9JTvZyPjvV742MrFP6DO1xmSm9r5Q23IyJ/ANgeJYqfX/FCsb3dW6+u/Z/GLSGOh8tofjtN57s4/3Grj+PC+YojEVERiJjDLnpAXLTA5wzoe9FRUItzii7e2i3b6/Ze5LKmqYev9M2BsZmRMI6MhXeue0E9pj0AGkBH0G/J/aR9umkyz8MErxwS/s/AlpCPReaGUYKYxGRYRD0eynOS6M4r/9R9on65qiRdVOX4K442cCavb1fNQ5OZqUHfKQFvJGHj/QUL6kBH+ld9r0d7dJTfJ1tA17SUjo/nx7wkZbiJeBN0pCPhdfvPFIyE/q1CmMREZcYY8jLSCEvI4VzJ2b32S7UEu6YBj9cE6K6sYX6pjANza00NDvP0fvVjS0cqmrsfK853Ot57r54PaYznANe0lKiwrtHqHeGflrA1+t+esDX73KrojAWERnxgn4vJXnplOQN/l7RLeE2GprDNDaHqW9upaHJee6+31u4NzSHqW9q5VhdMw3NDR37Dc3hAYVsyl/+4Kz0FvSRHnCe21d+S0/xkdnldS8ZKX7SU7xdPpMZdNq6cl59GCmMRURGAb/XQ3aqh+xUf1yP29za1jH6bogEdM+wD7P5/Z3kT5xEbVMr9U2t1IVaqWtq5UhNiN1NrdQ1halraul1ydTepPg8gwr29jCPbjsSgl1hLCIigxbweQj4AuT0/1NvVrTupbz8nFMerzXcRn1TmLrmzsCu6xbgHftR27WhViprQ+w+Orhgbw/p6GAfkx7g+3ecF9MxhkphLCIiI4bP6yE7zUN22tBH8L0Fe3SI14W67XcL9hO9LAIzXBTGIiKSlOIZ7MPN/YlyERGRUU5hLCIi4jKFsYiIiMsUxiIiIi6LKYyNMTcaY7YbY3YZYxb00+52Y4w1xsyNX4kiIiLJ7ZRhbIzxAouAm4AZwN3GmBm9tMsEHgFWxbtIERGRZBbLyPhCYJe1dre1thlYDNzWS7v/C3wXCMWxPhERkaQXSxgXAvuj9isir3UwxpQBk6y1L8exNhERkVFhyIt+GGM8wL8DD8TQdj4wH6CgoIAVK1YM9es71NXVxfV40jf1dWKonxND/ZwY6uf+xRLGB4BJUftFkdfaZQIzgRWR+1+OB5YaY2611q6JPpC19gngCYC5c+fa8vLywVfezYoVK4jn8aRv6uvEUD8nhvo5MdTP/Ytlmno1MM0YM8UYEwDuApa2v2mtrbbWjrXWTrbWTgbeAXoEsYiIiPTulGFsrW0FvgC8AmwDllhrtxhjvmWMuXW4CxQREUl2MZ0zttYuA5Z1e+2xPtqWD70sERGR0UMrcImIiLhMYSwiIuIyhbGIiIjLFMYiIiIuUxiLiIi4TGEsIiLiMoWxiIiIyxTGIiIiLlMYi4iIuExhLCIi4jKFsYiIiMsUxiIiIi5TGIuIiLhMYSwiIuIyhbGIiIjLFMYiIiIuUxiLiIi4TGEsIiLiMoWxiIiIyxTGIiIiLlMYi4iIuExhLCIi4jKFsYiIiMsUxiIiIi5TGIuIiLhMYSwiIuIyhbGIiIjLFMYiIiIuUxiLiIi4TGEsIiLiMoWxiIiIyxTGIiIiLlMYi4iIuExhLCIi4jKFsYiIiMsUxiIiIi5TGIuIiLhMYSwiIuIyhbGIiIjLFMYiIiIuUxiLiIi4TGEsIiLiMoWxiIiIyxTGIiIiLlMYi4iIuExhLCIi4rKYwtgYc6MxZrsxZpcxZkEv7/+9MWarMWajMebPxpiS+JcqIiKSnE4ZxsYYL7AIuAmYAdxtjJnRrdk6YK61dhbwAvC9eBcqIiKSrGIZGV8I7LLW7rbWNgOLgduiG1hrl1trGyK77wBF8S1TREQkefliaFMI7I/arwAu6qf9p4E/9PaGMWY+MB+goKCAFStWxFZlDOrq6uJ6POmb+jox1M+JoX5ODPVz/2IJ45gZY+4F5gJX9va+tfYJ4AmAuXPn2vLy8rh994oVK4jn8aRv6uvEUD8nhvo5MdTP/YsljA8Ak6L2iyKvdWGMuRb4BnCltbYpPuWJiIgkv1jOGa8GphljphhjAsBdwNLoBsaYOcBPgFuttZXxL1NERCR5nTKMrbWtwBeAV4BtwBJr7RZjzLeMMbdGmi0EMoD/NsasN8Ys7eNwIiIi0k1M54yttcuAZd1eeyxq+9o41yUiIjJqaAUuERERlymMRUREXKYwFhERcZnCWERExGUKYxEREZcpjEVERFymMBYREXGZwlhERMRlCmMRERGXKYxFRERcpjAWERFxmcJYRETEZQpjERERlymMRUREXKYwFhERcZnCWERExGUKYxEREZcpjEVERFymMBYREXGZwlhERMRlCmMRERGXKYxFRERcpjAWERFxmcJYRETEZQpjERERlymMRUREXKYwFhERcZn
CWERExGUKYxEREZcpjEVERFymMBYREXGZwlhERMRlCmMRERGXKYxFRERcpjAWERFxmcJYRETEZQpjERERlymMRUREXKYwFhERcZnCWERExGUKYxEREZcpjEVERFymMBYREXGZwlhERMRlCmMRERGXxRTGxpgbjTHbjTG7jDELenk/xRjzfOT9VcaYyfEuVEREJFmdMoyNMV5gEXATMAO42xgzo1uzTwMnrbVTgR8A3413oSIiIskqlpHxhcAua+1ua20zsBi4rVub24CfR7ZfAK4xxpj4lSkiIpK8YgnjQmB/1H5F5LVe21hrW4FqIC8eBYqIiCQ7XyK/zBgzH5gf2a0zxmyP4+HHAsfieDzpm/o6MdTPiaF+Tgz1M5T09UYsYXwAmBS1XxR5rbc2FcYYH5ANHO9+IGvtE8ATMXzngBlj1lhr5w7HsaUr9XViqJ8TQ/2cGOrn/sUyTb0amGaMmWKMCQB3AUu7tVkK3B/Z/gTwF2utjV+ZIiIiyeuUI2Nrbasx5gvAK4AXeNJau8UY8y1gjbV2KfAz4FljzC7gBE5gi4iISAxiOmdsrV0GLOv22mNR2yHgjviWNmDDMv0tvVJfJ4b6OTHUz4mhfu6H0WyyiIiIu7QcpoiIiMuSIoxPtVynDJ0xZpIxZrkxZqsxZosx5hG3a0pmxhivMWadMeYlt2tJVsaYHGPMC8aY940x24wxl7hdU7Iyxvxd5P8bm40xvzLGBN2uaaQ57cM4xuU6Zehaga9Ya2cAFwOfVz8Pq0eAbW4XkeR+CPzRWjsdOA/197AwxhQCXwLmWmtn4lwIrIt8uzntw5jYluuUIbLWHrLWro1s1+L8j6v7SmwSB8aYIuAW4Kdu15KsjDHZwDycX4JgrW221la5W1VS8wGpkXUo0oCDLtcz4iRDGMeyXKfEUeSuXHOAVe5WkrT+A/gHoM3tQpLYFOAo8FTkdMBPjTHpbheVjKy1B4DvA/uAQ0C1tfZP7lY18iRDGEsCGWMygF8DX7bW1rhdT7IxxnwEqLTWvud2LUnOB5QBP7bWzgHqAV1vMgyMMbk4s5VTgIlAujHmXnerGnmSIYxjWa5T4sAY48cJ4uestS+6XU+Sugy41RizB+eUy9XGmF+4W1JSqgAqrLXtszsv4ISzxN+1wIfW2qPW2hbgReBSl2sacZIhjGNZrlOGKHJLzJ8B26y1/+52PcnKWvs1a22RtXYyzn/Lf7HWahQRZ9baw8B+Y8zZkZeuAba6WFIy2wdcbIxJi/x/5Bp0sVwPCb1r03Doa7lOl8tKRpcB9wGbjDHrI699PbI6m8jp6IvAc5F/xO8GHnS5nqRkrV1ljHkBWIvzq4x1aDWuHrQCl4iIiMuSYZpaRETktKYwFhERcZnCWERExGUKYxEREZcpjEVERFymMBYREXGZwlhERMRlCmMRERGX/X+FKIUr0iLK4AAAAABJRU5ErkJggg==\n",
  220. "text/plain": [
  221. "<Figure size 576x360 with 1 Axes>"
  222. ]
  223. },
  224. "metadata": {
  225. "needs_background": "light"
  226. },
  227. "output_type": "display_data"
  228. }
  229. ],
  230. "source": [
  231. "def plot_learning_curves(history):\n",
  232. " pd.DataFrame(history.history).plot(figsize=(8, 5))\n",
  233. " plt.grid(True)\n",
  234. " plt.gca().set_ylim(0, 1)\n",
  235. " plt.show()\n",
  236. "plot_learning_curves(history)"
  237. ]
  238. },
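The output of the next cell (its source comes after the long output block and falls outside this excerpt) is the stream of short training runs that a randomized search like the sketch above prints while fitting each sampled configuration on its cross-validation folds; the 73/73 lines are the per-fold evaluations. Once such a search finishes, the winning configuration and its refit Keras model can be retrieved roughly as below; random_search_cv is the hypothetical search object from the earlier sketch, not a name defined in this notebook:

print(random_search_cv.best_params_)    # best sampled hyperparameter combination
print(random_search_cv.best_score_)     # best cross-validated score (the Keras wrapper scores by negative loss)
best_model = random_search_cv.best_estimator_.model    # the Keras model inside the refit wrapper
print(best_model.evaluate(x_test_scaled, y_test, verbose=0))    # final test-set MSE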
  239. {
  240. "cell_type": "code",
  241. "execution_count": 7,
  242. "metadata": {},
  243. "outputs": [
  244. {
  245. "name": "stdout",
  246. "output_type": "stream",
  247. "text": [
  248. "Epoch 1/5\n",
  249. "291/291 [==============================] - 2s 6ms/step - loss: 4.8169 - val_loss: 4.5730\n",
  250. "Epoch 2/5\n",
  251. "291/291 [==============================] - 1s 2ms/step - loss: 4.1151 - val_loss: 3.9278\n",
  252. "Epoch 3/5\n",
  253. "291/291 [==============================] - 1s 2ms/step - loss: 3.5486 - val_loss: 3.4186\n",
  254. "Epoch 4/5\n",
  255. "291/291 [==============================] - 1s 2ms/step - loss: 3.0920 - val_loss: 3.0067\n",
  256. "Epoch 5/5\n",
  257. "291/291 [==============================] - 1s 2ms/step - loss: 2.7177 - val_loss: 2.6650\n",
  258. "73/73 [==============================] - 0s 1ms/step - loss: 2.2921\n",
  259. "Epoch 1/5\n",
  260. "291/291 [==============================] - 1s 2ms/step - loss: 5.0068 - val_loss: 4.7979\n",
  261. "Epoch 2/5\n",
  262. "291/291 [==============================] - 1s 2ms/step - loss: 4.1171 - val_loss: 4.0133\n",
  263. "Epoch 3/5\n",
  264. "291/291 [==============================] - 1s 2ms/step - loss: 3.4675 - val_loss: 3.4350\n",
  265. "Epoch 4/5\n",
  266. "291/291 [==============================] - 1s 4ms/step - loss: 2.9901 - val_loss: 3.0049\n",
  267. "Epoch 5/5\n",
  268. "291/291 [==============================] - 1s 5ms/step - loss: 2.6353 - val_loss: 2.6800\n",
  269. "73/73 [==============================] - 0s 2ms/step - loss: 2.5213\n",
  270. "Epoch 1/5\n",
  271. "291/291 [==============================] - 1s 2ms/step - loss: 9.3036 - val_loss: 8.4139\n",
  272. "Epoch 2/5\n",
  273. "291/291 [==============================] - 1s 3ms/step - loss: 7.2816 - val_loss: 6.7408\n",
  274. "Epoch 3/5\n",
  275. "291/291 [==============================] - 1s 2ms/step - loss: 5.8961 - val_loss: 5.5513\n",
  276. "Epoch 4/5\n",
  277. "291/291 [==============================] - 1s 3ms/step - loss: 4.8929 - val_loss: 4.6717\n",
  278. "Epoch 5/5\n",
  279. "291/291 [==============================] - 1s 3ms/step - loss: 4.1412 - val_loss: 4.0009\n",
  280. "73/73 [==============================] - 0s 1ms/step - loss: 3.7795\n",
  281. "Epoch 1/5\n",
  282. "291/291 [==============================] - 1s 2ms/step - loss: 5.4101 - val_loss: 4.9982\n",
  283. "Epoch 2/5\n",
  284. "291/291 [==============================] - 1s 2ms/step - loss: 4.4919 - val_loss: 4.2074\n",
  285. "Epoch 3/5\n",
  286. "291/291 [==============================] - 1s 3ms/step - loss: 3.8180 - val_loss: 3.6181\n",
  287. "Epoch 4/5\n",
  288. "291/291 [==============================] - 1s 2ms/step - loss: 3.3089 - val_loss: 3.1706\n",
  289. "Epoch 5/5\n",
  290. "291/291 [==============================] - 1s 2ms/step - loss: 2.9186 - val_loss: 2.8252\n",
  291. "73/73 [==============================] - 0s 1ms/step - loss: 2.6951\n",
  292. "Epoch 1/5\n",
  293. "291/291 [==============================] - 1s 3ms/step - loss: 5.8741 - val_loss: 5.5725\n",
  294. "Epoch 2/5\n",
  295. "291/291 [==============================] - 1s 2ms/step - loss: 4.7093 - val_loss: 4.5407\n",
  296. "Epoch 3/5\n",
  297. "291/291 [==============================] - 1s 2ms/step - loss: 3.8526 - val_loss: 3.7603\n",
  298. "Epoch 4/5\n",
  299. "291/291 [==============================] - 1s 2ms/step - loss: 3.2003 - val_loss: 3.1531\n",
  300. "Epoch 5/5\n",
  301. "291/291 [==============================] - 1s 2ms/step - loss: 2.6945 - val_loss: 2.6759\n",
  302. "73/73 [==============================] - 0s 1ms/step - loss: 2.5566\n",
  303. "Epoch 1/5\n",
  304. "291/291 [==============================] - 1s 2ms/step - loss: 4.2941 - val_loss: 4.4632\n",
  305. "Epoch 2/5\n",
  306. "291/291 [==============================] - 1s 3ms/step - loss: 3.9875 - val_loss: 4.1607\n",
  307. "Epoch 3/5\n",
  308. "291/291 [==============================] - 1s 4ms/step - loss: 3.7184 - val_loss: 3.8935\n",
  309. "Epoch 4/5\n",
  310. "291/291 [==============================] - 1s 3ms/step - loss: 3.4811 - val_loss: 3.6568\n",
  311. "Epoch 5/5\n",
  312. "291/291 [==============================] - 1s 3ms/step - loss: 3.2701 - val_loss: 3.4459\n",
  313. "73/73 [==============================] - 0s 6ms/step - loss: 3.5133\n",
  314. "Epoch 1/5\n",
  315. "291/291 [==============================] - 1s 4ms/step - loss: 3.6447 - val_loss: 3.4718\n",
  316. "Epoch 2/5\n",
  317. "291/291 [==============================] - 1s 3ms/step - loss: 3.4244 - val_loss: 3.2581\n",
  318. "Epoch 3/5\n",
  319. "291/291 [==============================] - 1s 2ms/step - loss: 3.2293 - val_loss: 3.0665\n",
  320. "Epoch 4/5\n",
  321. "291/291 [==============================] - 1s 2ms/step - loss: 3.0530 - val_loss: 2.8942\n",
  322. "Epoch 5/5\n",
  323. "291/291 [==============================] - 1s 2ms/step - loss: 2.8945 - val_loss: 2.7397\n",
  324. "73/73 [==============================] - 0s 1ms/step - loss: 2.6940\n",
  325. "Epoch 1/5\n",
  326. "291/291 [==============================] - 1s 2ms/step - loss: 6.7541 - val_loss: 6.7783\n",
  327. "Epoch 2/5\n",
  328. "291/291 [==============================] - 1s 2ms/step - loss: 6.0625 - val_loss: 6.0975\n",
  329. "Epoch 3/5\n",
  330. "291/291 [==============================] - 1s 2ms/step - loss: 5.4993 - val_loss: 5.5445\n",
  331. "Epoch 4/5\n",
  332. "291/291 [==============================] - 1s 2ms/step - loss: 5.0333 - val_loss: 5.0862\n",
  333. "Epoch 5/5\n",
  334. "291/291 [==============================] - 1s 2ms/step - loss: 4.6410 - val_loss: 4.7017\n",
  335. "73/73 [==============================] - 0s 1ms/step - loss: 4.4608\n",
  336. "Epoch 1/5\n",
  337. "291/291 [==============================] - 1s 2ms/step - loss: 6.0449 - val_loss: 5.9840\n",
  338. "Epoch 2/5\n",
  339. "291/291 [==============================] - 1s 2ms/step - loss: 5.5157 - val_loss: 5.4873\n",
  340. "Epoch 3/5\n",
  341. "291/291 [==============================] - 1s 2ms/step - loss: 5.0672 - val_loss: 5.0642\n",
  342. "Epoch 4/5\n",
  343. "291/291 [==============================] - 1s 2ms/step - loss: 4.6826 - val_loss: 4.6996\n",
  344. "Epoch 5/5\n",
  345. "291/291 [==============================] - 1s 2ms/step - loss: 4.3496 - val_loss: 4.3825\n",
  346. "73/73 [==============================] - 0s 1ms/step - loss: 4.3887\n",
  347. "Epoch 1/5\n",
  348. "291/291 [==============================] - 1s 2ms/step - loss: 5.0486 - val_loss: 5.3851\n",
  349. "Epoch 2/5\n",
  350. "291/291 [==============================] - 1s 2ms/step - loss: 4.5588 - val_loss: 4.8678\n",
  351. "Epoch 3/5\n",
  352. "291/291 [==============================] - 1s 2ms/step - loss: 4.1570 - val_loss: 4.4457\n",
  353. "Epoch 4/5\n",
  354. "291/291 [==============================] - 1s 2ms/step - loss: 3.8225 - val_loss: 4.0898\n",
  355. "Epoch 5/5\n",
  356. "291/291 [==============================] - 1s 2ms/step - loss: 3.5329 - val_loss: 3.7827\n",
  357. "73/73 [==============================] - 0s 1ms/step - loss: 3.1506\n",
  358. "Epoch 1/5\n",
  359. "291/291 [==============================] - 1s 2ms/step - loss: 3.7016 - val_loss: 1.8071\n",
  360. "Epoch 2/5\n",
  361. "291/291 [==============================] - 1s 2ms/step - loss: 1.3571 - val_loss: 1.1725\n",
  362. "Epoch 3/5\n",
  363. "291/291 [==============================] - 1s 2ms/step - loss: 1.0011 - val_loss: 0.9637\n",
  364. "Epoch 4/5\n",
  365. "291/291 [==============================] - 1s 2ms/step - loss: 0.8598 - val_loss: 0.8660\n",
  366. "Epoch 5/5\n",
  367. "291/291 [==============================] - 1s 2ms/step - loss: 0.7885 - val_loss: 0.8054\n",
  368. "73/73 [==============================] - 0s 1ms/step - loss: 0.7158\n",
  369. "Epoch 1/5\n",
  370. "291/291 [==============================] - 1s 5ms/step - loss: 4.2231 - val_loss: 2.7347\n",
  371. "Epoch 2/5\n",
  372. "291/291 [==============================] - 1s 2ms/step - loss: 2.1102 - val_loss: 1.9381\n",
  373. "Epoch 3/5\n",
  374. "291/291 [==============================] - 1s 2ms/step - loss: 1.6375 - val_loss: 1.6005\n",
  375. "Epoch 4/5\n",
  376. "291/291 [==============================] - 1s 2ms/step - loss: 1.4040 - val_loss: 1.4049\n",
  377. "Epoch 5/5\n",
  378. "291/291 [==============================] - 1s 2ms/step - loss: 1.2423 - val_loss: 1.2323\n",
  379. "73/73 [==============================] - 0s 1ms/step - loss: 1.1663\n",
  380. "Epoch 1/5\n",
  381. "291/291 [==============================] - 1s 3ms/step - loss: 2.3103 - val_loss: 1.2870\n",
  382. "Epoch 2/5\n",
  383. "291/291 [==============================] - 1s 5ms/step - loss: 1.0396 - val_loss: 1.0022\n",
  384. "Epoch 3/5\n",
  385. "291/291 [==============================] - 1s 5ms/step - loss: 0.8901 - val_loss: 0.9237\n",
  386. "Epoch 4/5\n",
  387. "291/291 [==============================] - 1s 4ms/step - loss: 0.8270 - val_loss: 0.8664\n",
  388. "Epoch 5/5\n",
  389. "291/291 [==============================] - 1s 5ms/step - loss: 0.7782 - val_loss: 0.8163\n",
  390. "73/73 [==============================] - 0s 2ms/step - loss: 0.7750\n",
  391. "Epoch 1/5\n",
  392. "291/291 [==============================] - 1s 3ms/step - loss: 1.9203 - val_loss: 1.3846\n",
  393. "Epoch 2/5\n",
  394. "291/291 [==============================] - 1s 2ms/step - loss: 0.9788 - val_loss: 0.9880\n",
  395. "Epoch 3/5\n",
  396. "291/291 [==============================] - 1s 2ms/step - loss: 0.8097 - val_loss: 0.8389\n",
  397. "Epoch 4/5\n",
  398. "291/291 [==============================] - 1s 4ms/step - loss: 0.7312 - val_loss: 0.7660\n",
  399. "Epoch 5/5\n",
  400. "291/291 [==============================] - 1s 4ms/step - loss: 0.6805 - val_loss: 0.7162\n",
  401. "73/73 [==============================] - 0s 1ms/step - loss: 0.7457\n",
  402. "Epoch 1/5\n",
  403. "291/291 [==============================] - 1s 3ms/step - loss: 3.5828 - val_loss: 2.8808\n",
  404. "Epoch 2/5\n",
  405. "291/291 [==============================] - 1s 3ms/step - loss: 2.1687 - val_loss: 1.9635\n",
  406. "Epoch 3/5\n",
  407. "291/291 [==============================] - 1s 3ms/step - loss: 1.5279 - val_loss: 1.4081\n",
  408. "Epoch 4/5\n"
  409. ]
  410. },
  411. {
  412. "name": "stdout",
  413. "output_type": "stream",
  414. "text": [
  415. "291/291 [==============================] - 2s 6ms/step - loss: 1.1076 - val_loss: 1.0160\n",
  416. "Epoch 5/5\n",
  417. "291/291 [==============================] - 1s 2ms/step - loss: 0.8330 - val_loss: 0.7905\n",
  418. "73/73 [==============================] - 0s 2ms/step - loss: 0.7793\n",
  419. "Epoch 1/5\n",
  420. "291/291 [==============================] - 1s 2ms/step - loss: 1.8419 - val_loss: 0.7693\n",
  421. "Epoch 2/5\n",
  422. "291/291 [==============================] - 1s 3ms/step - loss: 0.6910 - val_loss: 0.6971\n",
  423. "Epoch 3/5\n",
  424. "291/291 [==============================] - 2s 6ms/step - loss: 0.6449 - val_loss: 0.6545\n",
  425. "Epoch 4/5\n",
  426. "291/291 [==============================] - 1s 4ms/step - loss: 0.6056 - val_loss: 0.6123\n",
  427. "Epoch 5/5\n",
  428. "291/291 [==============================] - 1s 3ms/step - loss: 0.5691 - val_loss: 0.5765\n",
  429. "73/73 [==============================] - 0s 2ms/step - loss: 0.5029\n",
  430. "Epoch 1/5\n",
  431. "291/291 [==============================] - 1s 3ms/step - loss: 1.3857 - val_loss: 1.2507\n",
  432. "Epoch 2/5\n",
  433. "291/291 [==============================] - 1s 3ms/step - loss: 0.9986 - val_loss: 0.7857\n",
  434. "Epoch 3/5\n",
  435. "291/291 [==============================] - 1s 2ms/step - loss: 0.6906 - val_loss: 0.7207\n",
  436. "Epoch 4/5\n",
  437. "291/291 [==============================] - 1s 4ms/step - loss: 0.6332 - val_loss: 0.6706\n",
  438. "Epoch 5/5\n",
  439. "291/291 [==============================] - 1s 3ms/step - loss: 0.5890 - val_loss: 0.6230\n",
  440. "73/73 [==============================] - 0s 3ms/step - loss: 0.5720\n",
  441. "Epoch 1/5\n",
  442. "291/291 [==============================] - 1s 3ms/step - loss: 1.8105 - val_loss: 0.7049\n",
  443. "Epoch 2/5\n",
  444. "291/291 [==============================] - 1s 3ms/step - loss: 0.6388 - val_loss: 0.6503\n",
  445. "Epoch 3/5\n",
  446. "291/291 [==============================] - 1s 2ms/step - loss: 0.5910 - val_loss: 0.6074\n",
  447. "Epoch 4/5\n",
  448. "291/291 [==============================] - 2s 5ms/step - loss: 0.5540 - val_loss: 0.5716\n",
  449. "Epoch 5/5\n",
  450. "291/291 [==============================] - 1s 4ms/step - loss: 0.5237 - val_loss: 0.5382\n",
  451. "73/73 [==============================] - 0s 2ms/step - loss: 0.5127\n",
  452. "Epoch 1/5\n",
  453. "291/291 [==============================] - 1s 3ms/step - loss: 1.8572 - val_loss: 0.7581\n",
  454. "Epoch 2/5\n",
  455. "291/291 [==============================] - 1s 2ms/step - loss: 0.6717 - val_loss: 0.6315\n",
  456. "Epoch 3/5\n",
  457. "291/291 [==============================] - 1s 3ms/step - loss: 0.5860 - val_loss: 0.5931\n",
  458. "Epoch 4/5\n",
  459. "291/291 [==============================] - 1s 2ms/step - loss: 0.5448 - val_loss: 0.5634\n",
  460. "Epoch 5/5\n",
  461. "291/291 [==============================] - 1s 2ms/step - loss: 0.5176 - val_loss: 0.5400\n",
  462. "73/73 [==============================] - 0s 1ms/step - loss: 0.5224\n",
  463. "Epoch 1/5\n",
  464. "291/291 [==============================] - 1s 2ms/step - loss: 2.1553 - val_loss: 1.3268\n",
  465. "Epoch 2/5\n",
  466. "291/291 [==============================] - 1s 2ms/step - loss: 0.8686 - val_loss: 0.6880\n",
  467. "Epoch 3/5\n",
  468. "291/291 [==============================] - 1s 2ms/step - loss: 0.6061 - val_loss: 0.6185\n",
  469. "Epoch 4/5\n",
  470. "291/291 [==============================] - 1s 2ms/step - loss: 0.5627 - val_loss: 0.5864\n",
  471. "Epoch 5/5\n",
  472. "291/291 [==============================] - 1s 3ms/step - loss: 0.5363 - val_loss: 0.5630\n",
  473. "73/73 [==============================] - 0s 1ms/step - loss: 0.5539\n",
  474. "Epoch 1/5\n",
  475. "291/291 [==============================] - 1s 3ms/step - loss: 1.3482 - val_loss: 0.7642\n",
  476. "Epoch 2/5\n",
  477. "291/291 [==============================] - 1s 4ms/step - loss: 0.6706 - val_loss: 0.9423\n",
  478. "Epoch 3/5\n",
  479. "291/291 [==============================] - 1s 3ms/step - loss: 1.3385 - val_loss: 0.6238\n",
  480. "Epoch 4/5\n",
  481. "291/291 [==============================] - 1s 3ms/step - loss: 0.5603 - val_loss: 0.5355\n",
  482. "Epoch 5/5\n",
  483. "291/291 [==============================] - 1s 3ms/step - loss: 0.4953 - val_loss: 0.4855\n",
  484. "73/73 [==============================] - 0s 2ms/step - loss: 0.4297\n",
  485. "Epoch 1/5\n",
  486. "291/291 [==============================] - 1s 2ms/step - loss: 1.9948 - val_loss: 0.8004\n",
  487. "Epoch 2/5\n",
  488. "291/291 [==============================] - 1s 2ms/step - loss: 0.5545 - val_loss: 0.5286\n",
  489. "Epoch 3/5\n",
  490. "291/291 [==============================] - 1s 2ms/step - loss: 0.4713 - val_loss: 0.4793\n",
  491. "Epoch 4/5\n",
  492. "291/291 [==============================] - 1s 3ms/step - loss: 0.4382 - val_loss: 0.4580\n",
  493. "Epoch 5/5\n",
  494. "291/291 [==============================] - 1s 2ms/step - loss: 0.4208 - val_loss: 0.4364\n",
  495. "73/73 [==============================] - 0s 2ms/step - loss: 0.4151\n",
  496. "Epoch 1/5\n",
  497. "291/291 [==============================] - 1s 2ms/step - loss: 1.8369 - val_loss: 0.6091\n",
  498. "Epoch 2/5\n",
  499. "291/291 [==============================] - 1s 2ms/step - loss: 0.5032 - val_loss: 0.5002\n",
  500. "Epoch 3/5\n",
  501. "291/291 [==============================] - 1s 2ms/step - loss: 0.4483 - val_loss: 0.4479\n",
  502. "Epoch 4/5\n",
  503. "291/291 [==============================] - 1s 2ms/step - loss: 0.4240 - val_loss: 0.4295\n",
  504. "Epoch 5/5\n",
  505. "291/291 [==============================] - 1s 2ms/step - loss: 0.4113 - val_loss: 0.4319\n",
  506. "73/73 [==============================] - 0s 1ms/step - loss: 0.4317\n",
  507. "Epoch 1/5\n",
  508. "291/291 [==============================] - 1s 2ms/step - loss: 0.9804 - val_loss: 0.7013\n",
  509. "Epoch 2/5\n",
  510. "291/291 [==============================] - 1s 2ms/step - loss: 0.6021 - val_loss: 0.6005\n",
  511. "Epoch 3/5\n",
  512. "291/291 [==============================] - 1s 2ms/step - loss: 0.5350 - val_loss: 0.5424\n",
  513. "Epoch 4/5\n",
  514. "291/291 [==============================] - 1s 2ms/step - loss: 0.4914 - val_loss: 0.5037\n",
  515. "Epoch 5/5\n",
  516. "291/291 [==============================] - 1s 2ms/step - loss: 0.4616 - val_loss: 0.4892\n",
  517. "73/73 [==============================] - 0s 1ms/step - loss: 0.4591\n",
  518. "Epoch 1/5\n",
  519. "291/291 [==============================] - 1s 2ms/step - loss: 1.2666 - val_loss: 0.7628\n",
  520. "Epoch 2/5\n",
  521. "291/291 [==============================] - 1s 2ms/step - loss: 0.6287 - val_loss: 0.6277\n",
  522. "Epoch 3/5\n",
  523. "291/291 [==============================] - 1s 2ms/step - loss: 0.5483 - val_loss: 0.5561\n",
  524. "Epoch 4/5\n",
  525. "291/291 [==============================] - 1s 2ms/step - loss: 0.5036 - val_loss: 0.5200\n",
  526. "Epoch 5/5\n",
  527. "291/291 [==============================] - 1s 2ms/step - loss: 0.4761 - val_loss: 0.4922\n",
  528. "73/73 [==============================] - 0s 1ms/step - loss: 0.4822\n",
  529. "Epoch 1/5\n",
  530. "291/291 [==============================] - 1s 2ms/step - loss: 11.3426 - val_loss: 9.6649\n",
  531. "Epoch 2/5\n",
  532. "291/291 [==============================] - 1s 2ms/step - loss: 7.9234 - val_loss: 7.0355\n",
  533. "Epoch 3/5\n",
  534. "291/291 [==============================] - 1s 2ms/step - loss: 5.9163 - val_loss: 5.3862\n",
  535. "Epoch 4/5\n",
  536. "291/291 [==============================] - 1s 2ms/step - loss: 4.6081 - val_loss: 4.2673\n",
  537. "Epoch 5/5\n",
  538. "291/291 [==============================] - 1s 2ms/step - loss: 3.6999 - val_loss: 3.4746\n",
  539. "73/73 [==============================] - 0s 1ms/step - loss: 3.2731\n",
  540. "Epoch 1/5\n",
  541. "291/291 [==============================] - 1s 2ms/step - loss: 3.9577 - val_loss: 3.4614\n",
  542. "Epoch 2/5\n",
  543. "291/291 [==============================] - 1s 2ms/step - loss: 3.2219 - val_loss: 2.8494\n",
  544. "Epoch 3/5\n",
  545. "291/291 [==============================] - 1s 2ms/step - loss: 2.6820 - val_loss: 2.4054\n",
  546. "Epoch 4/5\n",
  547. "291/291 [==============================] - 1s 2ms/step - loss: 2.2791 - val_loss: 2.0831\n",
  548. "Epoch 5/5\n",
  549. "291/291 [==============================] - 1s 4ms/step - loss: 1.9737 - val_loss: 1.8387\n",
  550. "73/73 [==============================] - 0s 3ms/step - loss: 1.6884\n",
  551. "Epoch 1/5\n",
  552. "291/291 [==============================] - 1s 3ms/step - loss: 4.7898 - val_loss: 4.1948\n",
  553. "Epoch 2/5\n",
  554. "291/291 [==============================] - 1s 4ms/step - loss: 3.8259 - val_loss: 3.3577\n",
  555. "Epoch 3/5\n",
  556. "291/291 [==============================] - 1s 5ms/step - loss: 3.1419 - val_loss: 2.7609\n",
  557. "Epoch 4/5\n",
  558. "291/291 [==============================] - 1s 4ms/step - loss: 2.6467 - val_loss: 2.3250\n",
  559. "Epoch 5/5\n",
  560. "291/291 [==============================] - 1s 3ms/step - loss: 2.2776 - val_loss: 1.9989\n",
  561. "73/73 [==============================] - 0s 2ms/step - loss: 1.9425\n",
  562. "Epoch 1/5\n",
  563. "291/291 [==============================] - 1s 4ms/step - loss: 5.8132 - val_loss: 5.0776\n",
  564. "Epoch 2/5\n",
  565. "291/291 [==============================] - 1s 4ms/step - loss: 4.3716 - val_loss: 3.9162\n",
  566. "Epoch 3/5\n",
  567. "291/291 [==============================] - 1s 3ms/step - loss: 3.4070 - val_loss: 3.1308\n",
  568. "Epoch 4/5\n",
  569. "291/291 [==============================] - 1s 2ms/step - loss: 2.7462 - val_loss: 2.5878\n",
  570. "Epoch 5/5\n",
  571. "291/291 [==============================] - 1s 2ms/step - loss: 2.2853 - val_loss: 2.2071\n",
  572. "73/73 [==============================] - 0s 1ms/step - loss: 2.0792\n",
  573. "Epoch 1/5\n"
  574. ]
  575. },
  576. {
  577. "name": "stdout",
  578. "output_type": "stream",
  579. "text": [
  580. "291/291 [==============================] - 1s 3ms/step - loss: 6.1835 - val_loss: 5.5398\n",
  581. "Epoch 2/5\n",
  582. "291/291 [==============================] - 1s 3ms/step - loss: 4.6036 - val_loss: 4.2368\n",
  583. "Epoch 3/5\n",
  584. "291/291 [==============================] - 2s 5ms/step - loss: 3.5759 - val_loss: 3.3669\n",
  585. "Epoch 4/5\n",
  586. "291/291 [==============================] - 1s 3ms/step - loss: 2.8785 - val_loss: 2.7662\n",
  587. "Epoch 5/5\n",
  588. "291/291 [==============================] - 1s 4ms/step - loss: 2.3915 - val_loss: 2.3408\n",
  589. "73/73 [==============================] - 0s 2ms/step - loss: 2.3029\n",
  590. "Epoch 1/5\n",
  591. "291/291 [==============================] - 1s 4ms/step - loss: 5.3489 - val_loss: 5.2520\n",
  592. "Epoch 2/5\n",
  593. "291/291 [==============================] - 1s 2ms/step - loss: 4.6538 - val_loss: 4.6065\n",
  594. "Epoch 3/5\n",
  595. "291/291 [==============================] - 1s 2ms/step - loss: 4.1037 - val_loss: 4.0977\n",
  596. "Epoch 4/5\n",
  597. "291/291 [==============================] - 1s 2ms/step - loss: 3.6613 - val_loss: 3.6868\n",
  598. "Epoch 5/5\n",
  599. "291/291 [==============================] - 1s 2ms/step - loss: 3.2988 - val_loss: 3.3474\n",
  600. "73/73 [==============================] - 0s 1ms/step - loss: 2.8968\n",
  601. "Epoch 1/5\n",
  602. "291/291 [==============================] - 1s 3ms/step - loss: 5.2808 - val_loss: 5.2694\n",
  603. "Epoch 2/5\n",
  604. "291/291 [==============================] - 1s 2ms/step - loss: 4.8091 - val_loss: 4.8044\n",
  605. "Epoch 3/5\n",
  606. "291/291 [==============================] - 1s 2ms/step - loss: 4.4032 - val_loss: 4.4053\n",
  607. "Epoch 4/5\n",
  608. "291/291 [==============================] - 1s 2ms/step - loss: 4.0512 - val_loss: 4.0588\n",
  609. "Epoch 5/5\n",
  610. "291/291 [==============================] - 1s 2ms/step - loss: 3.7438 - val_loss: 3.7558\n",
  611. "73/73 [==============================] - 0s 1ms/step - loss: 3.5815\n",
  612. "Epoch 1/5\n",
  613. "291/291 [==============================] - 1s 2ms/step - loss: 5.4911 - val_loss: 5.4957\n",
  614. "Epoch 2/5\n",
  615. "291/291 [==============================] - 1s 2ms/step - loss: 4.8665 - val_loss: 4.8745\n",
  616. "Epoch 3/5\n",
  617. "291/291 [==============================] - 1s 2ms/step - loss: 4.3461 - val_loss: 4.3599\n",
  618. "Epoch 4/5\n",
  619. "291/291 [==============================] - 1s 2ms/step - loss: 3.9080 - val_loss: 3.9234\n",
  620. "Epoch 5/5\n",
  621. "291/291 [==============================] - 1s 2ms/step - loss: 3.5329 - val_loss: 3.5503\n",
  622. "73/73 [==============================] - 0s 1ms/step - loss: 3.2364\n",
  623. "Epoch 1/5\n",
  624. "291/291 [==============================] - 1s 2ms/step - loss: 2.8242 - val_loss: 2.7665\n",
  625. "Epoch 2/5\n",
  626. "291/291 [==============================] - 1s 2ms/step - loss: 2.6523 - val_loss: 2.6052\n",
  627. "Epoch 3/5\n",
  628. "291/291 [==============================] - 1s 2ms/step - loss: 2.4958 - val_loss: 2.4619\n",
  629. "Epoch 4/5\n",
  630. "291/291 [==============================] - 1s 2ms/step - loss: 2.3534 - val_loss: 2.3323\n",
  631. "Epoch 5/5\n",
  632. "291/291 [==============================] - 1s 2ms/step - loss: 2.2234 - val_loss: 2.2147\n",
  633. "73/73 [==============================] - 0s 1ms/step - loss: 3.1935\n",
  634. "Epoch 1/5\n",
  635. "291/291 [==============================] - 1s 2ms/step - loss: 3.6630 - val_loss: 3.6252\n",
  636. "Epoch 2/5\n",
  637. "291/291 [==============================] - 1s 2ms/step - loss: 3.4023 - val_loss: 3.3646\n",
  638. "Epoch 3/5\n",
  639. "291/291 [==============================] - 1s 2ms/step - loss: 3.1727 - val_loss: 3.1349\n",
  640. "Epoch 4/5\n",
  641. "291/291 [==============================] - 1s 2ms/step - loss: 2.9694 - val_loss: 2.9313\n",
  642. "Epoch 5/5\n",
  643. "291/291 [==============================] - 1s 2ms/step - loss: 2.7887 - val_loss: 2.7489\n",
  644. "73/73 [==============================] - 0s 1ms/step - loss: 2.6280\n",
  645. "Epoch 1/5\n",
  646. "291/291 [==============================] - 1s 2ms/step - loss: 3.4105 - val_loss: 1.5951\n",
  647. "Epoch 2/5\n",
  648. "291/291 [==============================] - 1s 2ms/step - loss: 1.1307 - val_loss: 0.9534\n",
  649. "Epoch 3/5\n",
  650. "291/291 [==============================] - 1s 2ms/step - loss: 0.7960 - val_loss: 0.7504\n",
  651. "Epoch 4/5\n",
  652. "291/291 [==============================] - 1s 2ms/step - loss: 0.6617 - val_loss: 0.6499\n",
  653. "Epoch 5/5\n",
  654. "291/291 [==============================] - 1s 2ms/step - loss: 0.5909 - val_loss: 0.5935\n",
  655. "73/73 [==============================] - 0s 947us/step - loss: 0.5350\n",
  656. "Epoch 1/5\n",
  657. "291/291 [==============================] - 1s 2ms/step - loss: 3.0406 - val_loss: 1.7767\n",
  658. "Epoch 2/5\n",
  659. "291/291 [==============================] - 1s 2ms/step - loss: 1.2125 - val_loss: 1.0339\n",
  660. "Epoch 3/5\n",
  661. "291/291 [==============================] - 1s 2ms/step - loss: 0.8665 - val_loss: 0.8640\n",
  662. "Epoch 4/5\n",
  663. "291/291 [==============================] - 1s 2ms/step - loss: 0.7737 - val_loss: 0.7995\n",
  664. "Epoch 5/5\n",
  665. "291/291 [==============================] - 1s 2ms/step - loss: 0.7262 - val_loss: 0.7606\n",
  666. "73/73 [==============================] - 0s 1ms/step - loss: 0.6801\n",
  667. "Epoch 1/5\n",
  668. "291/291 [==============================] - 1s 2ms/step - loss: 3.6898 - val_loss: 1.6810\n",
  669. "Epoch 2/5\n",
  670. "291/291 [==============================] - 1s 2ms/step - loss: 1.2707 - val_loss: 0.9581\n",
  671. "Epoch 3/5\n",
  672. "291/291 [==============================] - 1s 2ms/step - loss: 0.8615 - val_loss: 0.7624\n",
  673. "Epoch 4/5\n",
  674. "291/291 [==============================] - 1s 2ms/step - loss: 0.6941 - val_loss: 0.6858\n",
  675. "Epoch 5/5\n",
  676. "291/291 [==============================] - 1s 2ms/step - loss: 0.6329 - val_loss: 0.6572\n",
  677. "73/73 [==============================] - 0s 1ms/step - loss: 0.6158\n",
  678. "Epoch 1/5\n",
  679. "291/291 [==============================] - 1s 2ms/step - loss: 2.5504 - val_loss: 1.1766\n",
  680. "Epoch 2/5\n",
  681. "291/291 [==============================] - 1s 2ms/step - loss: 0.9466 - val_loss: 0.8649\n",
  682. "Epoch 3/5\n",
  683. "291/291 [==============================] - 1s 3ms/step - loss: 0.7943 - val_loss: 0.8188\n",
  684. "Epoch 4/5\n",
  685. "291/291 [==============================] - 1s 3ms/step - loss: 0.7503 - val_loss: 0.7907\n",
  686. "Epoch 5/5\n",
  687. "291/291 [==============================] - 1s 3ms/step - loss: 0.7214 - val_loss: 0.7664\n",
  688. "73/73 [==============================] - 0s 1ms/step - loss: 0.7321\n",
  689. "Epoch 1/5\n",
  690. "291/291 [==============================] - 1s 5ms/step - loss: 3.0289 - val_loss: 1.5087\n",
  691. "Epoch 2/5\n",
  692. "291/291 [==============================] - 1s 3ms/step - loss: 1.0624 - val_loss: 0.9393\n",
  693. "Epoch 3/5\n",
  694. "291/291 [==============================] - 1s 2ms/step - loss: 0.8066 - val_loss: 0.8399\n",
  695. "Epoch 4/5\n",
  696. "291/291 [==============================] - 1s 2ms/step - loss: 0.7395 - val_loss: 0.7947\n",
  697. "Epoch 5/5\n",
  698. "291/291 [==============================] - 1s 3ms/step - loss: 0.7124 - val_loss: 0.7646\n",
  699. "73/73 [==============================] - 0s 2ms/step - loss: 0.7279\n",
  700. "Epoch 1/5\n",
  701. "291/291 [==============================] - 1s 3ms/step - loss: 1.2040 - val_loss: 0.6957\n",
  702. "Epoch 2/5\n",
  703. "291/291 [==============================] - 1s 2ms/step - loss: 0.6681 - val_loss: 0.6113\n",
  704. "Epoch 3/5\n",
  705. "291/291 [==============================] - 1s 2ms/step - loss: 0.5447 - val_loss: 0.5632\n",
  706. "Epoch 4/5\n",
  707. "291/291 [==============================] - 1s 2ms/step - loss: 0.5139 - val_loss: 0.5305\n",
  708. "Epoch 5/5\n",
  709. "291/291 [==============================] - 1s 3ms/step - loss: 0.4909 - val_loss: 0.5009\n",
  710. "73/73 [==============================] - 0s 2ms/step - loss: 0.4421\n",
  711. "Epoch 1/5\n",
  712. "291/291 [==============================] - 1s 2ms/step - loss: 1.3196 - val_loss: 0.6275\n",
  713. "Epoch 2/5\n",
  714. "291/291 [==============================] - 1s 2ms/step - loss: 0.5722 - val_loss: 0.5555\n",
  715. "Epoch 3/5\n",
  716. "291/291 [==============================] - 1s 2ms/step - loss: 0.5062 - val_loss: 0.5341\n",
  717. "Epoch 4/5\n",
  718. "291/291 [==============================] - 1s 2ms/step - loss: 0.4898 - val_loss: 0.5205\n",
  719. "Epoch 5/5\n",
  720. "291/291 [==============================] - 1s 2ms/step - loss: 0.4753 - val_loss: 0.5048\n",
  721. "73/73 [==============================] - 0s 1ms/step - loss: 0.4867\n",
  722. "Epoch 1/5\n",
  723. "291/291 [==============================] - 1s 2ms/step - loss: 1.6235 - val_loss: 0.7264\n",
  724. "Epoch 2/5\n",
  725. "291/291 [==============================] - 1s 2ms/step - loss: 0.6525 - val_loss: 0.6394\n",
  726. "Epoch 3/5\n",
  727. "291/291 [==============================] - 1s 2ms/step - loss: 0.5787 - val_loss: 0.5830\n",
  728. "Epoch 4/5\n",
  729. "291/291 [==============================] - 1s 2ms/step - loss: 0.5417 - val_loss: 0.7250\n",
  730. "Epoch 5/5\n",
  731. "291/291 [==============================] - 0s 2ms/step - loss: 0.5343 - val_loss: 0.6121\n",
  732. "73/73 [==============================] - 0s 989us/step - loss: 0.5311\n",
  733. "Epoch 1/5\n",
  734. "291/291 [==============================] - 1s 4ms/step - loss: 1.1426 - val_loss: 0.9536\n",
  735. "Epoch 2/5\n",
  736. "291/291 [==============================] - 1s 2ms/step - loss: 1.3551 - val_loss: 0.7176\n",
  737. "Epoch 3/5\n",
  738. "291/291 [==============================] - 1s 2ms/step - loss: 0.6717 - val_loss: 0.6206\n",
  739. "Epoch 4/5\n"
  740. ]
  741. },
  742. {
  743. "name": "stdout",
  744. "output_type": "stream",
  745. "text": [
  746. "291/291 [==============================] - 1s 2ms/step - loss: 0.5368 - val_loss: 0.5669\n",
  747. "Epoch 5/5\n",
  748. "291/291 [==============================] - 1s 2ms/step - loss: 0.5008 - val_loss: 0.5377\n",
  749. "73/73 [==============================] - 0s 2ms/step - loss: 0.5252\n",
  750. "Epoch 1/5\n",
  751. "291/291 [==============================] - 1s 2ms/step - loss: 1.7605 - val_loss: 1.7377\n",
  752. "Epoch 2/5\n",
  753. "291/291 [==============================] - 1s 2ms/step - loss: 0.6260 - val_loss: 0.5712\n",
  754. "Epoch 3/5\n",
  755. "291/291 [==============================] - 1s 2ms/step - loss: 0.5062 - val_loss: 0.5299\n",
  756. "Epoch 4/5\n",
  757. "291/291 [==============================] - 1s 2ms/step - loss: 0.4784 - val_loss: 0.5035\n",
  758. "Epoch 5/5\n",
  759. "291/291 [==============================] - 1s 2ms/step - loss: 0.4626 - val_loss: 0.4875\n",
  760. "73/73 [==============================] - 0s 1ms/step - loss: 0.4773\n",
  761. "Epoch 1/5\n",
  762. "291/291 [==============================] - 1s 2ms/step - loss: 0.9199 - val_loss: 0.8311\n",
  763. "Epoch 2/5\n",
  764. "291/291 [==============================] - 1s 2ms/step - loss: 0.9259 - val_loss: 0.5136\n",
  765. "Epoch 3/5\n",
  766. "291/291 [==============================] - 1s 2ms/step - loss: 0.4665 - val_loss: 0.4505\n",
  767. "Epoch 4/5\n",
  768. "291/291 [==============================] - 1s 2ms/step - loss: 0.4377 - val_loss: 0.4385\n",
  769. "Epoch 5/5\n",
  770. "291/291 [==============================] - 1s 2ms/step - loss: 0.4285 - val_loss: 0.4362\n",
  771. "73/73 [==============================] - 0s 1ms/step - loss: 0.3902\n",
  772. "Epoch 1/5\n",
  773. "291/291 [==============================] - 1s 2ms/step - loss: 1.0497 - val_loss: 0.6815\n",
  774. "Epoch 2/5\n",
  775. "291/291 [==============================] - 1s 2ms/step - loss: 0.6271 - val_loss: 0.5409\n",
  776. "Epoch 3/5\n",
  777. "291/291 [==============================] - 1s 2ms/step - loss: 0.4848 - val_loss: 0.4983\n",
  778. "Epoch 4/5\n",
  779. "291/291 [==============================] - 1s 2ms/step - loss: 0.4560 - val_loss: 0.4947\n",
  780. "Epoch 5/5\n",
  781. "291/291 [==============================] - 1s 2ms/step - loss: 0.4378 - val_loss: 0.4619\n",
  782. "73/73 [==============================] - 0s 1ms/step - loss: 0.4390\n",
  783. "Epoch 1/5\n",
  784. "291/291 [==============================] - 1s 2ms/step - loss: 1.0232 - val_loss: 0.7099\n",
  785. "Epoch 2/5\n",
  786. "291/291 [==============================] - 1s 2ms/step - loss: 0.5465 - val_loss: 0.5445\n",
  787. "Epoch 3/5\n",
  788. "291/291 [==============================] - 1s 2ms/step - loss: 0.4846 - val_loss: 0.4916\n",
  789. "Epoch 4/5\n",
  790. "291/291 [==============================] - 1s 2ms/step - loss: 0.4548 - val_loss: 0.4742\n",
  791. "Epoch 5/5\n",
  792. "291/291 [==============================] - 1s 2ms/step - loss: 0.4365 - val_loss: 0.4621\n",
  793. "73/73 [==============================] - 0s 1ms/step - loss: 0.4356\n",
  794. "Epoch 1/5\n",
  795. "291/291 [==============================] - 1s 2ms/step - loss: 1.1020 - val_loss: 0.5543\n",
  796. "Epoch 2/5\n",
  797. "291/291 [==============================] - 1s 2ms/step - loss: 0.5268 - val_loss: 0.4679\n",
  798. "Epoch 3/5\n",
  799. "291/291 [==============================] - 1s 2ms/step - loss: 0.4284 - val_loss: 0.4413\n",
  800. "Epoch 4/5\n",
  801. "291/291 [==============================] - 1s 2ms/step - loss: 0.4153 - val_loss: 0.4318\n",
  802. "Epoch 5/5\n",
  803. "291/291 [==============================] - 1s 2ms/step - loss: 0.4082 - val_loss: 0.4287\n",
  804. "73/73 [==============================] - 0s 1ms/step - loss: 0.4246\n",
  805. "Epoch 1/5\n",
  806. "291/291 [==============================] - 1s 2ms/step - loss: 1.2389 - val_loss: 0.8160\n",
  807. "Epoch 2/5\n",
  808. "291/291 [==============================] - 1s 2ms/step - loss: 0.6134 - val_loss: 0.5100\n",
  809. "Epoch 3/5\n",
  810. "291/291 [==============================] - 1s 2ms/step - loss: 0.4617 - val_loss: 0.4737\n",
  811. "Epoch 4/5\n",
  812. "291/291 [==============================] - 1s 2ms/step - loss: 0.4357 - val_loss: 0.4494\n",
  813. "Epoch 5/5\n",
  814. "291/291 [==============================] - 1s 2ms/step - loss: 0.4217 - val_loss: 0.4427\n",
  815. "73/73 [==============================] - 0s 1ms/step - loss: 0.4434\n",
  816. "Epoch 1/5\n",
  817. "291/291 [==============================] - 1s 2ms/step - loss: 5.9462 - val_loss: 5.3994\n",
  818. "Epoch 2/5\n",
  819. "291/291 [==============================] - 1s 2ms/step - loss: 4.5568 - val_loss: 4.1889\n",
  820. "Epoch 3/5\n",
  821. "291/291 [==============================] - 1s 2ms/step - loss: 3.5882 - val_loss: 3.3368\n",
  822. "Epoch 4/5\n",
  823. "291/291 [==============================] - 1s 2ms/step - loss: 2.8971 - val_loss: 2.7195\n",
  824. "Epoch 5/5\n",
  825. "291/291 [==============================] - 1s 2ms/step - loss: 2.3940 - val_loss: 2.2686\n",
  826. "73/73 [==============================] - 0s 1ms/step - loss: 2.1046\n",
  827. "Epoch 1/5\n",
  828. "291/291 [==============================] - 1s 2ms/step - loss: 5.1198 - val_loss: 4.4710\n",
  829. "Epoch 2/5\n",
  830. "291/291 [==============================] - 1s 2ms/step - loss: 4.0800 - val_loss: 3.5782\n",
  831. "Epoch 3/5\n",
  832. "291/291 [==============================] - 1s 2ms/step - loss: 3.3225 - val_loss: 2.9251\n",
  833. "Epoch 4/5\n",
  834. "291/291 [==============================] - 1s 2ms/step - loss: 2.7587 - val_loss: 2.4422\n",
  835. "Epoch 5/5\n",
  836. "291/291 [==============================] - 1s 2ms/step - loss: 2.3270 - val_loss: 2.0739\n",
  837. "73/73 [==============================] - 0s 1ms/step - loss: 1.9872\n",
  838. "Epoch 1/5\n",
  839. "291/291 [==============================] - 1s 2ms/step - loss: 6.4180 - val_loss: 5.6155\n",
  840. "Epoch 2/5\n",
  841. "291/291 [==============================] - 1s 2ms/step - loss: 4.6716 - val_loss: 4.2075\n",
  842. "Epoch 3/5\n",
  843. "291/291 [==============================] - 1s 2ms/step - loss: 3.5735 - val_loss: 3.2943\n",
  844. "Epoch 4/5\n",
  845. "291/291 [==============================] - 1s 2ms/step - loss: 2.8514 - val_loss: 2.6878\n",
  846. "Epoch 5/5\n",
  847. "291/291 [==============================] - 1s 2ms/step - loss: 2.3604 - val_loss: 2.2671\n",
  848. "73/73 [==============================] - 0s 1ms/step - loss: 2.1656\n",
  849. "Epoch 1/5\n",
  850. "291/291 [==============================] - 1s 2ms/step - loss: 5.6762 - val_loss: 5.1921\n",
  851. "Epoch 2/5\n",
  852. "291/291 [==============================] - 1s 2ms/step - loss: 4.4648 - val_loss: 4.1648\n",
  853. "Epoch 3/5\n",
  854. "291/291 [==============================] - 1s 2ms/step - loss: 3.6373 - val_loss: 3.4422\n",
  855. "Epoch 4/5\n",
  856. "291/291 [==============================] - 1s 2ms/step - loss: 3.0460 - val_loss: 2.9170\n",
  857. "Epoch 5/5\n",
  858. "291/291 [==============================] - 1s 2ms/step - loss: 2.6108 - val_loss: 2.5224\n",
  859. "73/73 [==============================] - 0s 1ms/step - loss: 2.4740\n",
  860. "Epoch 1/5\n",
  861. "291/291 [==============================] - 1s 2ms/step - loss: 5.4584 - val_loss: 5.0898\n",
  862. "Epoch 2/5\n",
  863. "291/291 [==============================] - 1s 2ms/step - loss: 4.3076 - val_loss: 4.1554\n",
  864. "Epoch 3/5\n",
  865. "291/291 [==============================] - 1s 2ms/step - loss: 3.5466 - val_loss: 3.5179\n",
  866. "Epoch 4/5\n",
  867. "291/291 [==============================] - 1s 2ms/step - loss: 3.0222 - val_loss: 3.0626\n",
  868. "Epoch 5/5\n",
  869. "291/291 [==============================] - 1s 2ms/step - loss: 2.6462 - val_loss: 2.7265\n",
  870. "73/73 [==============================] - 0s 1ms/step - loss: 2.5530\n",
  871. "Epoch 1/5\n",
  872. "291/291 [==============================] - 1s 2ms/step - loss: 5.5961 - val_loss: 5.5606\n",
  873. "Epoch 2/5\n",
  874. "291/291 [==============================] - 1s 2ms/step - loss: 4.9043 - val_loss: 4.9012\n",
  875. "Epoch 3/5\n",
  876. "291/291 [==============================] - 1s 2ms/step - loss: 4.3269 - val_loss: 4.3492\n",
  877. "Epoch 4/5\n",
  878. "291/291 [==============================] - 1s 2ms/step - loss: 3.8424 - val_loss: 3.8820\n",
  879. "Epoch 5/5\n",
  880. "291/291 [==============================] - 1s 2ms/step - loss: 3.4317 - val_loss: 3.4854\n",
  881. "73/73 [==============================] - 0s 1ms/step - loss: 3.1216\n",
  882. "Epoch 1/5\n",
  883. "291/291 [==============================] - 1s 2ms/step - loss: 6.6512 - val_loss: 6.0233\n",
  884. "Epoch 2/5\n",
  885. "291/291 [==============================] - 1s 2ms/step - loss: 5.3255 - val_loss: 4.9024\n",
  886. "Epoch 3/5\n",
  887. "291/291 [==============================] - 1s 2ms/step - loss: 4.3633 - val_loss: 4.0702\n",
  888. "Epoch 4/5\n",
  889. "291/291 [==============================] - 1s 2ms/step - loss: 3.6494 - val_loss: 3.4423\n",
  890. "Epoch 5/5\n",
  891. "291/291 [==============================] - 1s 2ms/step - loss: 3.1090 - val_loss: 2.9607\n",
  892. "73/73 [==============================] - 0s 1ms/step - loss: 2.8689\n",
  893. "Epoch 1/5\n",
  894. "291/291 [==============================] - 1s 2ms/step - loss: 6.2986 - val_loss: 6.0976\n",
  895. "Epoch 2/5\n",
  896. "291/291 [==============================] - 1s 2ms/step - loss: 5.4293 - val_loss: 5.2836\n",
  897. "Epoch 3/5\n",
  898. "291/291 [==============================] - 1s 2ms/step - loss: 4.7284 - val_loss: 4.6202\n",
  899. "Epoch 4/5\n",
  900. "291/291 [==============================] - 1s 2ms/step - loss: 4.1536 - val_loss: 4.0759\n",
  901. "Epoch 5/5\n",
  902. "291/291 [==============================] - 1s 2ms/step - loss: 3.6796 - val_loss: 3.6258\n",
  903. "73/73 [==============================] - 0s 1ms/step - loss: 3.4060\n",
  904. "Epoch 1/5\n"
  905. ]
  906. },
  907. {
  908. "name": "stdout",
  909. "output_type": "stream",
  910. "text": [
  911. "291/291 [==============================] - 1s 2ms/step - loss: 5.2352 - val_loss: 5.0652\n",
  912. "Epoch 2/5\n",
  913. "291/291 [==============================] - 1s 2ms/step - loss: 4.4762 - val_loss: 4.3471\n",
  914. "Epoch 3/5\n",
  915. "291/291 [==============================] - 1s 2ms/step - loss: 3.8593 - val_loss: 3.7632\n",
  916. "Epoch 4/5\n",
  917. "291/291 [==============================] - 1s 2ms/step - loss: 3.3549 - val_loss: 3.2855\n",
  918. "Epoch 5/5\n",
  919. "291/291 [==============================] - 1s 2ms/step - loss: 2.9407 - val_loss: 2.8939\n",
  920. "73/73 [==============================] - 0s 1ms/step - loss: 2.7242\n",
  921. "Epoch 1/5\n",
  922. "291/291 [==============================] - 1s 2ms/step - loss: 6.4990 - val_loss: 6.1872\n",
  923. "Epoch 2/5\n",
  924. "291/291 [==============================] - 1s 2ms/step - loss: 5.6846 - val_loss: 5.4463\n",
  925. "Epoch 3/5\n",
  926. "291/291 [==============================] - 1s 2ms/step - loss: 5.0232 - val_loss: 4.8393\n",
  927. "Epoch 4/5\n",
  928. "291/291 [==============================] - 1s 2ms/step - loss: 4.4751 - val_loss: 4.3345\n",
  929. "Epoch 5/5\n",
  930. "291/291 [==============================] - 1s 2ms/step - loss: 4.0148 - val_loss: 3.9112\n",
  931. "73/73 [==============================] - 0s 1ms/step - loss: 3.6314\n",
  932. "Epoch 1/5\n",
  933. "291/291 [==============================] - 1s 2ms/step - loss: 1.7775 - val_loss: 1.0805\n",
  934. "Epoch 2/5\n",
  935. "291/291 [==============================] - 1s 2ms/step - loss: 0.8752 - val_loss: 0.8295\n",
  936. "Epoch 3/5\n",
  937. "291/291 [==============================] - 1s 2ms/step - loss: 0.7351 - val_loss: 0.7698\n",
  938. "Epoch 4/5\n",
  939. "291/291 [==============================] - 1s 2ms/step - loss: 0.6834 - val_loss: 0.7293\n",
  940. "Epoch 5/5\n",
  941. "291/291 [==============================] - 1s 2ms/step - loss: 0.6494 - val_loss: 0.6961\n",
  942. "73/73 [==============================] - 0s 1ms/step - loss: 0.6062\n",
  943. "Epoch 1/5\n",
  944. "291/291 [==============================] - 1s 2ms/step - loss: 2.4905 - val_loss: 1.3628\n",
  945. "Epoch 2/5\n",
  946. "291/291 [==============================] - 1s 3ms/step - loss: 0.9625 - val_loss: 0.9005\n",
  947. "Epoch 3/5\n",
  948. "291/291 [==============================] - 1s 2ms/step - loss: 0.7388 - val_loss: 0.7783\n",
  949. "Epoch 4/5\n",
  950. "291/291 [==============================] - 1s 2ms/step - loss: 0.6761 - val_loss: 0.7298\n",
  951. "Epoch 5/5\n",
  952. "291/291 [==============================] - 1s 2ms/step - loss: 0.6439 - val_loss: 0.6964\n",
  953. "73/73 [==============================] - 0s 2ms/step - loss: 0.6262\n",
  954. "Epoch 1/5\n",
  955. "291/291 [==============================] - 1s 4ms/step - loss: 2.4798 - val_loss: 1.1401\n",
  956. "Epoch 2/5\n",
  957. "291/291 [==============================] - 1s 3ms/step - loss: 0.8964 - val_loss: 0.8314\n",
  958. "Epoch 3/5\n",
  959. "291/291 [==============================] - 2s 7ms/step - loss: 0.7351 - val_loss: 0.7566\n",
  960. "Epoch 4/5\n",
  961. "291/291 [==============================] - 2s 6ms/step - loss: 0.6868 - val_loss: 0.7200\n",
  962. "Epoch 5/5\n",
  963. "291/291 [==============================] - 1s 5ms/step - loss: 0.6590 - val_loss: 0.6941\n",
  964. "73/73 [==============================] - 0s 1ms/step - loss: 0.6492\n",
  965. "Epoch 1/5\n",
  966. "291/291 [==============================] - 1s 4ms/step - loss: 2.6666 - val_loss: 1.3002\n",
  967. "Epoch 2/5\n",
  968. "291/291 [==============================] - 1s 3ms/step - loss: 1.0116 - val_loss: 0.9707\n",
  969. "Epoch 3/5\n",
  970. "291/291 [==============================] - 1s 2ms/step - loss: 0.8455 - val_loss: 0.8674\n",
  971. "Epoch 4/5\n",
  972. "291/291 [==============================] - 1s 2ms/step - loss: 0.7741 - val_loss: 0.8135\n",
  973. "Epoch 5/5\n",
  974. "291/291 [==============================] - 1s 3ms/step - loss: 0.7288 - val_loss: 0.7753\n",
  975. "73/73 [==============================] - 0s 1ms/step - loss: 0.7695\n",
  976. "Epoch 1/5\n",
  977. "291/291 [==============================] - 1s 4ms/step - loss: 2.8676 - val_loss: 1.4169\n",
  978. "Epoch 2/5\n",
  979. "291/291 [==============================] - 2s 5ms/step - loss: 0.9800 - val_loss: 0.8503\n",
  980. "Epoch 3/5\n",
  981. "291/291 [==============================] - 1s 3ms/step - loss: 0.7140 - val_loss: 0.7437\n",
  982. "Epoch 4/5\n",
  983. "291/291 [==============================] - 1s 2ms/step - loss: 0.6514 - val_loss: 0.7036\n",
  984. "Epoch 5/5\n",
  985. "291/291 [==============================] - 1s 2ms/step - loss: 0.6218 - val_loss: 0.6772\n",
  986. "73/73 [==============================] - 0s 1ms/step - loss: 0.6605\n",
  987. "Epoch 1/5\n",
  988. "291/291 [==============================] - 1s 2ms/step - loss: 1.1299 - val_loss: 0.6697\n",
  989. "Epoch 2/5\n",
  990. "291/291 [==============================] - 1s 2ms/step - loss: 0.5930 - val_loss: 0.7551\n",
  991. "Epoch 3/5\n",
  992. "291/291 [==============================] - 1s 2ms/step - loss: 0.5378 - val_loss: 0.5328\n",
  993. "Epoch 4/5\n",
  994. "291/291 [==============================] - 1s 2ms/step - loss: 0.5006 - val_loss: 0.4989\n",
  995. "Epoch 5/5\n",
  996. "291/291 [==============================] - 1s 2ms/step - loss: 0.4861 - val_loss: 0.4922\n",
  997. "73/73 [==============================] - 0s 943us/step - loss: 0.4453\n",
  998. "Epoch 1/5\n",
  999. "291/291 [==============================] - 1s 2ms/step - loss: 1.7476 - val_loss: 0.7915\n",
  1000. "Epoch 2/5\n",
  1001. "291/291 [==============================] - 1s 2ms/step - loss: 0.6503 - val_loss: 0.6637\n",
  1002. "Epoch 3/5\n",
  1003. "291/291 [==============================] - 1s 2ms/step - loss: 0.5645 - val_loss: 0.5692\n",
  1004. "Epoch 4/5\n",
  1005. "291/291 [==============================] - 1s 2ms/step - loss: 0.5084 - val_loss: 0.5288\n",
  1006. "Epoch 5/5\n",
  1007. "291/291 [==============================] - 1s 2ms/step - loss: 0.4789 - val_loss: 0.5013\n",
  1008. "73/73 [==============================] - 0s 1ms/step - loss: 0.4777\n",
  1009. "Epoch 1/5\n",
  1010. "291/291 [==============================] - 1s 2ms/step - loss: 2.8266 - val_loss: 0.7361\n",
  1011. "Epoch 2/5\n",
  1012. "291/291 [==============================] - 1s 2ms/step - loss: 0.6013 - val_loss: 0.6103\n",
  1013. "Epoch 3/5\n",
  1014. "291/291 [==============================] - 1s 2ms/step - loss: 0.5167 - val_loss: 0.5371\n",
  1015. "Epoch 4/5\n",
  1016. "291/291 [==============================] - 1s 2ms/step - loss: 0.4818 - val_loss: 0.4986\n",
  1017. "Epoch 5/5\n",
  1018. "291/291 [==============================] - 1s 2ms/step - loss: 0.4585 - val_loss: 0.4728\n",
  1019. "73/73 [==============================] - 0s 1ms/step - loss: 0.4512\n",
  1020. "Epoch 1/5\n",
  1021. "291/291 [==============================] - 1s 2ms/step - loss: 1.0593 - val_loss: 0.7257\n",
  1022. "Epoch 2/5\n",
  1023. "291/291 [==============================] - 1s 2ms/step - loss: 0.8543 - val_loss: 0.5513\n",
  1024. "Epoch 3/5\n",
  1025. "291/291 [==============================] - 1s 2ms/step - loss: 0.4859 - val_loss: 0.5154\n",
  1026. "Epoch 4/5\n",
  1027. "291/291 [==============================] - 1s 3ms/step - loss: 0.4652 - val_loss: 0.5001\n",
  1028. "Epoch 5/5\n",
  1029. "291/291 [==============================] - 1s 4ms/step - loss: 0.4529 - val_loss: 0.4907\n",
  1030. "73/73 [==============================] - 0s 1ms/step - loss: 0.4780\n",
  1031. "Epoch 1/5\n",
  1032. "291/291 [==============================] - 1s 2ms/step - loss: 1.2468 - val_loss: 0.6793\n",
  1033. "Epoch 2/5\n",
  1034. "291/291 [==============================] - 1s 2ms/step - loss: 0.5681 - val_loss: 0.5651\n",
  1035. "Epoch 3/5\n",
  1036. "291/291 [==============================] - 1s 2ms/step - loss: 0.4914 - val_loss: 0.5098\n",
  1037. "Epoch 4/5\n",
  1038. "291/291 [==============================] - 1s 3ms/step - loss: 0.4578 - val_loss: 0.4871\n",
  1039. "Epoch 5/5\n",
  1040. "291/291 [==============================] - 1s 3ms/step - loss: 0.4407 - val_loss: 0.4668\n",
  1041. "73/73 [==============================] - 0s 2ms/step - loss: 0.4557\n",
  1042. "Epoch 1/5\n",
  1043. "291/291 [==============================] - 1s 5ms/step - loss: 0.8795 - val_loss: 0.6695\n",
  1044. "Epoch 2/5\n",
  1045. "291/291 [==============================] - 1s 2ms/step - loss: 0.6576 - val_loss: 0.5118\n",
  1046. "Epoch 3/5\n",
  1047. "291/291 [==============================] - 1s 2ms/step - loss: 0.4713 - val_loss: 0.4705\n",
  1048. "Epoch 4/5\n",
  1049. "291/291 [==============================] - 1s 2ms/step - loss: 0.4478 - val_loss: 0.4548\n",
  1050. "Epoch 5/5\n",
  1051. "291/291 [==============================] - 1s 2ms/step - loss: 0.4295 - val_loss: 0.4358\n",
  1052. "73/73 [==============================] - 0s 1ms/step - loss: 0.3935\n",
  1053. "Epoch 1/5\n",
  1054. "291/291 [==============================] - 1s 2ms/step - loss: 1.6897 - val_loss: 0.6858\n",
  1055. "Epoch 2/5\n",
  1056. "291/291 [==============================] - 1s 2ms/step - loss: 0.5339 - val_loss: 0.5148\n",
  1057. "Epoch 3/5\n",
  1058. "291/291 [==============================] - 1s 2ms/step - loss: 0.4858 - val_loss: 0.4658\n",
  1059. "Epoch 4/5\n",
  1060. "291/291 [==============================] - 1s 2ms/step - loss: 0.4206 - val_loss: 0.4369\n",
  1061. "Epoch 5/5\n",
  1062. "291/291 [==============================] - 1s 2ms/step - loss: 0.4057 - val_loss: 0.4349\n",
  1063. "73/73 [==============================] - 0s 1ms/step - loss: 0.4082\n",
  1064. "Epoch 1/5\n",
  1065. "291/291 [==============================] - 1s 2ms/step - loss: 4.4084 - val_loss: 0.5663\n",
  1066. "Epoch 2/5\n",
  1067. "291/291 [==============================] - 1s 3ms/step - loss: 0.4907 - val_loss: 0.4774\n",
  1068. "Epoch 3/5\n",
  1069. "291/291 [==============================] - 1s 3ms/step - loss: 0.4581 - val_loss: 0.4706\n",
  1070. "Epoch 4/5\n"
  1071. ]
  1072. },
  1073. {
  1074. "name": "stdout",
  1075. "output_type": "stream",
  1076. "text": [
  1077. "291/291 [==============================] - 1s 2ms/step - loss: 0.4535 - val_loss: 0.4955\n",
  1078. "Epoch 5/5\n",
  1079. "291/291 [==============================] - 1s 4ms/step - loss: 0.4354 - val_loss: 0.4710\n",
  1080. "73/73 [==============================] - 0s 2ms/step - loss: 0.4747\n",
  1081. "Epoch 1/5\n",
  1082. "291/291 [==============================] - 1s 2ms/step - loss: 1.1389 - val_loss: 1.1028\n",
  1083. "Epoch 2/5\n",
  1084. "291/291 [==============================] - 1s 3ms/step - loss: 1.3702 - val_loss: 0.5831\n",
  1085. "Epoch 3/5\n",
  1086. "291/291 [==============================] - 1s 2ms/step - loss: 0.4709 - val_loss: 0.4648\n",
  1087. "Epoch 4/5\n",
  1088. "291/291 [==============================] - 1s 2ms/step - loss: 0.4225 - val_loss: 0.4191\n",
  1089. "Epoch 5/5\n",
  1090. "291/291 [==============================] - 1s 2ms/step - loss: 0.4031 - val_loss: 0.4984\n",
  1091. "73/73 [==============================] - 0s 1ms/step - loss: 0.4532\n",
  1092. "Epoch 1/5\n",
  1093. "291/291 [==============================] - 1s 2ms/step - loss: 1.6040 - val_loss: 1.1473\n",
  1094. "Epoch 2/5\n",
  1095. "291/291 [==============================] - 1s 2ms/step - loss: 0.8149 - val_loss: 0.5125\n",
  1096. "Epoch 3/5\n",
  1097. "291/291 [==============================] - 1s 2ms/step - loss: 0.4445 - val_loss: 0.4635\n",
  1098. "Epoch 4/5\n",
  1099. "291/291 [==============================] - 1s 2ms/step - loss: 0.4117 - val_loss: 0.4230\n",
  1100. "Epoch 5/5\n",
  1101. "291/291 [==============================] - 1s 2ms/step - loss: 0.3932 - val_loss: 0.4073\n",
  1102. "73/73 [==============================] - 0s 1ms/step - loss: 0.4032\n",
  1103. "Epoch 1/5\n",
  1104. "291/291 [==============================] - 1s 2ms/step - loss: 4.8431 - val_loss: 4.4298\n",
  1105. "Epoch 2/5\n",
  1106. "291/291 [==============================] - 1s 2ms/step - loss: 3.8964 - val_loss: 3.5922\n",
  1107. "Epoch 3/5\n",
  1108. "291/291 [==============================] - 1s 2ms/step - loss: 3.2067 - val_loss: 2.9695\n",
  1109. "Epoch 4/5\n",
  1110. "291/291 [==============================] - 1s 2ms/step - loss: 2.6906 - val_loss: 2.5035\n",
  1111. "Epoch 5/5\n",
  1112. "291/291 [==============================] - 1s 2ms/step - loss: 2.2988 - val_loss: 2.1467\n",
  1113. "73/73 [==============================] - 0s 1ms/step - loss: 2.4946\n",
  1114. "Epoch 1/5\n",
  1115. "291/291 [==============================] - 1s 2ms/step - loss: 3.6508 - val_loss: 3.3424\n",
  1116. "Epoch 2/5\n",
  1117. "291/291 [==============================] - 1s 2ms/step - loss: 2.8830 - val_loss: 2.6862\n",
  1118. "Epoch 3/5\n",
  1119. "291/291 [==============================] - 1s 2ms/step - loss: 2.3444 - val_loss: 2.2240\n",
  1120. "Epoch 4/5\n",
  1121. "291/291 [==============================] - 1s 2ms/step - loss: 1.9541 - val_loss: 1.8881\n",
  1122. "Epoch 5/5\n",
  1123. "291/291 [==============================] - 1s 2ms/step - loss: 1.6693 - val_loss: 1.6400\n",
  1124. "73/73 [==============================] - 0s 1ms/step - loss: 1.5052\n",
  1125. "Epoch 1/5\n",
  1126. "291/291 [==============================] - 1s 2ms/step - loss: 5.0877 - val_loss: 4.3724\n",
  1127. "Epoch 2/5\n",
  1128. "291/291 [==============================] - 1s 2ms/step - loss: 3.6712 - val_loss: 3.2121\n",
  1129. "Epoch 3/5\n",
  1130. "291/291 [==============================] - 1s 2ms/step - loss: 2.7445 - val_loss: 2.4536\n",
  1131. "Epoch 4/5\n",
  1132. "291/291 [==============================] - 1s 2ms/step - loss: 2.1272 - val_loss: 1.9471\n",
  1133. "Epoch 5/5\n",
  1134. "291/291 [==============================] - 1s 2ms/step - loss: 1.7069 - val_loss: 1.6035\n",
  1135. "73/73 [==============================] - 0s 1ms/step - loss: 1.5064\n",
  1136. "Epoch 1/5\n",
  1137. "291/291 [==============================] - 1s 2ms/step - loss: 4.3458 - val_loss: 3.8979\n",
  1138. "Epoch 2/5\n",
  1139. "291/291 [==============================] - 1s 2ms/step - loss: 3.3387 - val_loss: 3.0390\n",
  1140. "Epoch 3/5\n",
  1141. "291/291 [==============================] - 1s 2ms/step - loss: 2.6441 - val_loss: 2.4462\n",
  1142. "Epoch 4/5\n",
  1143. "291/291 [==============================] - 1s 2ms/step - loss: 2.1550 - val_loss: 2.0293\n",
  1144. "Epoch 5/5\n",
  1145. "291/291 [==============================] - 1s 2ms/step - loss: 1.8072 - val_loss: 1.7341\n",
  1146. "73/73 [==============================] - 0s 1ms/step - loss: 1.7072\n",
  1147. "Epoch 1/5\n",
  1148. "291/291 [==============================] - 1s 2ms/step - loss: 4.7489 - val_loss: 4.2925\n",
  1149. "Epoch 2/5\n",
  1150. "291/291 [==============================] - 1s 2ms/step - loss: 3.7297 - val_loss: 3.4276\n",
  1151. "Epoch 3/5\n",
  1152. "291/291 [==============================] - 1s 2ms/step - loss: 3.0318 - val_loss: 2.8198\n",
  1153. "Epoch 4/5\n",
  1154. "291/291 [==============================] - 1s 2ms/step - loss: 2.5223 - val_loss: 2.3716\n",
  1155. "Epoch 5/5\n",
  1156. "291/291 [==============================] - 1s 2ms/step - loss: 2.1374 - val_loss: 2.0333\n",
  1157. "73/73 [==============================] - 0s 1ms/step - loss: 1.8961\n",
  1158. "Epoch 1/5\n",
  1159. "291/291 [==============================] - 1s 2ms/step - loss: 6.0949 - val_loss: 5.9642\n",
  1160. "Epoch 2/5\n",
  1161. "291/291 [==============================] - 1s 2ms/step - loss: 5.2081 - val_loss: 5.1238\n",
  1162. "Epoch 3/5\n",
  1163. "291/291 [==============================] - 1s 2ms/step - loss: 4.5058 - val_loss: 4.4552\n",
  1164. "Epoch 4/5\n",
  1165. "291/291 [==============================] - 1s 2ms/step - loss: 3.9423 - val_loss: 3.9172\n",
  1166. "Epoch 5/5\n",
  1167. "291/291 [==============================] - 1s 2ms/step - loss: 3.4844 - val_loss: 3.4773\n",
  1168. "73/73 [==============================] - 0s 1ms/step - loss: 3.4060\n",
  1169. "Epoch 1/5\n",
  1170. "291/291 [==============================] - 1s 2ms/step - loss: 5.8099 - val_loss: 5.6138\n",
  1171. "Epoch 2/5\n",
  1172. "291/291 [==============================] - 1s 2ms/step - loss: 5.0486 - val_loss: 4.9111\n",
  1173. "Epoch 3/5\n",
  1174. "291/291 [==============================] - 1s 3ms/step - loss: 4.4310 - val_loss: 4.3354\n",
  1175. "Epoch 4/5\n",
  1176. "291/291 [==============================] - 1s 2ms/step - loss: 3.9221 - val_loss: 3.8585\n",
  1177. "Epoch 5/5\n",
  1178. "291/291 [==============================] - 1s 3ms/step - loss: 3.4999 - val_loss: 3.4587\n",
  1179. "73/73 [==============================] - 0s 2ms/step - loss: 3.3306\n",
  1180. "Epoch 1/5\n",
  1181. "291/291 [==============================] - 1s 3ms/step - loss: 5.0719 - val_loss: 4.9424\n",
  1182. "Epoch 2/5\n",
  1183. "291/291 [==============================] - 1s 2ms/step - loss: 4.3805 - val_loss: 4.2896\n",
  1184. "Epoch 3/5\n",
  1185. "291/291 [==============================] - 1s 2ms/step - loss: 3.8256 - val_loss: 3.7613\n",
  1186. "Epoch 4/5\n",
  1187. "291/291 [==============================] - 1s 2ms/step - loss: 3.3768 - val_loss: 3.3289\n",
  1188. "Epoch 5/5\n",
  1189. "291/291 [==============================] - 1s 2ms/step - loss: 3.0103 - val_loss: 2.9760\n",
  1190. "73/73 [==============================] - 0s 1ms/step - loss: 2.8672\n",
  1191. "Epoch 1/5\n",
  1192. "291/291 [==============================] - 1s 2ms/step - loss: 3.9293 - val_loss: 3.7809\n",
  1193. "Epoch 2/5\n",
  1194. "291/291 [==============================] - 1s 2ms/step - loss: 3.4273 - val_loss: 3.3214\n",
  1195. "Epoch 3/5\n",
  1196. "291/291 [==============================] - 1s 2ms/step - loss: 3.0156 - val_loss: 2.9436\n",
  1197. "Epoch 4/5\n",
  1198. "291/291 [==============================] - 1s 2ms/step - loss: 2.6760 - val_loss: 2.6306\n",
  1199. "Epoch 5/5\n",
  1200. "291/291 [==============================] - 1s 2ms/step - loss: 2.3935 - val_loss: 2.3690\n",
  1201. "73/73 [==============================] - 0s 1ms/step - loss: 2.4299\n",
  1202. "Epoch 1/5\n",
  1203. "291/291 [==============================] - 1s 2ms/step - loss: 5.6445 - val_loss: 5.4724\n",
  1204. "Epoch 2/5\n",
  1205. "291/291 [==============================] - 1s 2ms/step - loss: 4.8032 - val_loss: 4.6855\n",
  1206. "Epoch 3/5\n",
  1207. "291/291 [==============================] - 1s 2ms/step - loss: 4.1332 - val_loss: 4.0505\n",
  1208. "Epoch 4/5\n",
  1209. "291/291 [==============================] - 1s 2ms/step - loss: 3.5889 - val_loss: 3.5309\n",
  1210. "Epoch 5/5\n",
  1211. "291/291 [==============================] - 1s 2ms/step - loss: 3.1414 - val_loss: 3.1017\n",
  1212. "73/73 [==============================] - 0s 1ms/step - loss: 2.9799\n",
  1213. "Epoch 1/5\n",
  1214. "291/291 [==============================] - 1s 2ms/step - loss: 1.8680 - val_loss: 0.9095\n",
  1215. "Epoch 2/5\n",
  1216. "291/291 [==============================] - 1s 2ms/step - loss: 0.7450 - val_loss: 0.7481\n",
  1217. "Epoch 3/5\n",
  1218. "291/291 [==============================] - 1s 2ms/step - loss: 0.6777 - val_loss: 0.7161\n",
  1219. "Epoch 4/5\n",
  1220. "291/291 [==============================] - 1s 2ms/step - loss: 0.6517 - val_loss: 0.6906\n",
  1221. "Epoch 5/5\n",
  1222. "291/291 [==============================] - 1s 2ms/step - loss: 0.6303 - val_loss: 0.6672\n",
  1223. "73/73 [==============================] - 0s 2ms/step - loss: 0.5767\n",
  1224. "Epoch 1/5\n",
  1225. "291/291 [==============================] - 1s 2ms/step - loss: 2.7646 - val_loss: 1.3593\n",
  1226. "Epoch 2/5\n",
  1227. "291/291 [==============================] - 1s 2ms/step - loss: 0.9766 - val_loss: 0.9108\n",
  1228. "Epoch 3/5\n",
  1229. "291/291 [==============================] - 1s 2ms/step - loss: 0.7827 - val_loss: 0.8161\n",
  1230. "Epoch 4/5\n",
  1231. "291/291 [==============================] - 1s 2ms/step - loss: 0.7270 - val_loss: 0.7671\n",
  1232. "Epoch 5/5\n",
  1233. "291/291 [==============================] - 1s 2ms/step - loss: 0.6918 - val_loss: 0.7324\n",
  1234. "73/73 [==============================] - 0s 1ms/step - loss: 0.6594\n",
  1235. "Epoch 1/5\n"
  1236. ]
  1237. },
  1238. {
  1239. "name": "stdout",
  1240. "output_type": "stream",
  1241. "text": [
  1242. "291/291 [==============================] - 1s 2ms/step - loss: 2.4562 - val_loss: 1.0808\n",
  1243. "Epoch 2/5\n",
  1244. "291/291 [==============================] - 1s 2ms/step - loss: 0.8778 - val_loss: 0.7828\n",
  1245. "Epoch 3/5\n",
  1246. "291/291 [==============================] - 1s 2ms/step - loss: 0.7101 - val_loss: 0.7339\n",
  1247. "Epoch 4/5\n",
  1248. "291/291 [==============================] - 1s 2ms/step - loss: 0.6682 - val_loss: 0.7072\n",
  1249. "Epoch 5/5\n",
  1250. "291/291 [==============================] - 1s 2ms/step - loss: 0.6433 - val_loss: 0.6847\n",
  1251. "73/73 [==============================] - 0s 1ms/step - loss: 0.6255\n",
  1252. "Epoch 1/5\n",
  1253. "291/291 [==============================] - 1s 2ms/step - loss: 2.5754 - val_loss: 1.1130\n",
  1254. "Epoch 2/5\n",
  1255. "291/291 [==============================] - 1s 2ms/step - loss: 0.8611 - val_loss: 0.7854\n",
  1256. "Epoch 3/5\n",
  1257. "291/291 [==============================] - 1s 2ms/step - loss: 0.6886 - val_loss: 0.7143\n",
  1258. "Epoch 4/5\n",
  1259. "291/291 [==============================] - 1s 2ms/step - loss: 0.6345 - val_loss: 0.6764\n",
  1260. "Epoch 5/5\n",
  1261. "291/291 [==============================] - 1s 2ms/step - loss: 0.6030 - val_loss: 0.6476\n",
  1262. "73/73 [==============================] - 0s 1ms/step - loss: 0.6255\n",
  1263. "Epoch 1/5\n",
  1264. "291/291 [==============================] - 1s 2ms/step - loss: 1.6733 - val_loss: 0.9456\n",
  1265. "Epoch 2/5\n",
  1266. "291/291 [==============================] - 1s 2ms/step - loss: 0.8057 - val_loss: 0.7899\n",
  1267. "Epoch 3/5\n",
  1268. "291/291 [==============================] - 1s 2ms/step - loss: 0.7073 - val_loss: 0.7410\n",
  1269. "Epoch 4/5\n",
  1270. "291/291 [==============================] - 1s 2ms/step - loss: 0.6618 - val_loss: 0.7061\n",
  1271. "Epoch 5/5\n",
  1272. "291/291 [==============================] - 1s 2ms/step - loss: 0.6299 - val_loss: 0.6771\n",
  1273. "73/73 [==============================] - 0s 1ms/step - loss: 0.6498\n",
  1274. "Epoch 1/5\n",
  1275. "291/291 [==============================] - 1s 2ms/step - loss: 1.0539 - val_loss: 0.7109\n",
  1276. "Epoch 2/5\n",
  1277. "291/291 [==============================] - 1s 2ms/step - loss: 0.6761 - val_loss: 0.6193\n",
  1278. "Epoch 3/5\n",
  1279. "291/291 [==============================] - 1s 2ms/step - loss: 0.5472 - val_loss: 0.5222\n",
  1280. "Epoch 4/5\n",
  1281. "291/291 [==============================] - 1s 2ms/step - loss: 0.4975 - val_loss: 0.4945\n",
  1282. "Epoch 5/5\n",
  1283. "291/291 [==============================] - 1s 2ms/step - loss: 0.4777 - val_loss: 0.4760\n",
  1284. "73/73 [==============================] - 0s 1ms/step - loss: 0.4122\n",
  1285. "Epoch 1/5\n",
  1286. "291/291 [==============================] - 1s 2ms/step - loss: 1.0067 - val_loss: 0.6291\n",
  1287. "Epoch 2/5\n",
  1288. "291/291 [==============================] - 1s 2ms/step - loss: 0.5524 - val_loss: 0.6332\n",
  1289. "Epoch 3/5\n",
  1290. "291/291 [==============================] - 1s 2ms/step - loss: 2.0710 - val_loss: 0.6396\n",
  1291. "Epoch 4/5\n",
  1292. "291/291 [==============================] - 1s 2ms/step - loss: 0.5363 - val_loss: 0.5346\n",
  1293. "Epoch 5/5\n",
  1294. "291/291 [==============================] - 1s 2ms/step - loss: 0.4777 - val_loss: 0.4961\n",
  1295. "73/73 [==============================] - 0s 1ms/step - loss: 0.4685\n",
  1296. "Epoch 1/5\n",
  1297. "291/291 [==============================] - 1s 2ms/step - loss: 1.2691 - val_loss: 0.7390\n",
  1298. "Epoch 2/5\n",
  1299. "291/291 [==============================] - 1s 2ms/step - loss: 0.6981 - val_loss: 0.6452\n",
  1300. "Epoch 3/5\n",
  1301. "291/291 [==============================] - 1s 2ms/step - loss: 0.5722 - val_loss: 0.5759\n",
  1302. "Epoch 4/5\n",
  1303. "291/291 [==============================] - 1s 2ms/step - loss: 0.5152 - val_loss: 0.5298\n",
  1304. "Epoch 5/5\n",
  1305. "291/291 [==============================] - 1s 2ms/step - loss: 0.4863 - val_loss: 0.5171\n",
  1306. "73/73 [==============================] - 0s 2ms/step - loss: 0.4828\n",
  1307. "Epoch 1/5\n",
  1308. "291/291 [==============================] - 1s 2ms/step - loss: 1.1677 - val_loss: 0.7886\n",
  1309. "Epoch 2/5\n",
  1310. "291/291 [==============================] - 1s 2ms/step - loss: 1.0718 - val_loss: 0.7433\n",
  1311. "Epoch 3/5\n",
  1312. "291/291 [==============================] - 1s 2ms/step - loss: 0.5381 - val_loss: 0.5369\n",
  1313. "Epoch 4/5\n",
  1314. "291/291 [==============================] - 1s 2ms/step - loss: 0.4745 - val_loss: 0.4975\n",
  1315. "Epoch 5/5\n",
  1316. "291/291 [==============================] - 1s 2ms/step - loss: 0.4491 - val_loss: 0.4757\n",
  1317. "73/73 [==============================] - 0s 1ms/step - loss: 0.4637\n",
  1318. "Epoch 1/5\n",
  1319. "291/291 [==============================] - 1s 2ms/step - loss: 0.9471 - val_loss: 0.7296\n",
  1320. "Epoch 2/5\n",
  1321. "291/291 [==============================] - 1s 2ms/step - loss: 0.8193 - val_loss: 0.6135\n",
  1322. "Epoch 3/5\n",
  1323. "291/291 [==============================] - 1s 2ms/step - loss: 0.5363 - val_loss: 0.5541\n",
  1324. "Epoch 4/5\n",
  1325. "291/291 [==============================] - 1s 2ms/step - loss: 0.4980 - val_loss: 0.5192\n",
  1326. "Epoch 5/5\n",
  1327. "291/291 [==============================] - 1s 2ms/step - loss: 0.4775 - val_loss: 0.5033\n",
  1328. "73/73 [==============================] - 0s 1ms/step - loss: 0.4845\n",
  1329. "Epoch 1/5\n",
  1330. "291/291 [==============================] - 1s 2ms/step - loss: 0.8326 - val_loss: 0.6469\n",
  1331. "Epoch 2/5\n",
  1332. "291/291 [==============================] - 1s 2ms/step - loss: 0.6240 - val_loss: 0.5958\n",
  1333. "Epoch 3/5\n",
  1334. "291/291 [==============================] - 1s 2ms/step - loss: 0.5536 - val_loss: 0.5016\n",
  1335. "Epoch 4/5\n",
  1336. "291/291 [==============================] - 1s 2ms/step - loss: 0.4686 - val_loss: 0.4842\n",
  1337. "Epoch 5/5\n",
  1338. "291/291 [==============================] - 1s 2ms/step - loss: 0.4493 - val_loss: 0.4616\n",
  1339. "73/73 [==============================] - 0s 1ms/step - loss: 0.4097\n",
  1340. "Epoch 1/5\n",
  1341. "291/291 [==============================] - 1s 2ms/step - loss: 1.2851 - val_loss: 0.7020\n",
  1342. "Epoch 2/5\n",
  1343. "291/291 [==============================] - 1s 2ms/step - loss: 0.5312 - val_loss: 0.5198\n",
  1344. "Epoch 3/5\n",
  1345. "291/291 [==============================] - 1s 2ms/step - loss: 0.4622 - val_loss: 0.4763\n",
  1346. "Epoch 4/5\n",
  1347. "291/291 [==============================] - 1s 2ms/step - loss: 0.4367 - val_loss: 0.4503\n",
  1348. "Epoch 5/5\n",
  1349. "291/291 [==============================] - 1s 2ms/step - loss: 0.4220 - val_loss: 0.4425\n",
  1350. "73/73 [==============================] - 0s 1ms/step - loss: 0.4271\n",
  1351. "Epoch 1/5\n",
  1352. "291/291 [==============================] - 1s 2ms/step - loss: 2.5619 - val_loss: 0.8989\n",
  1353. "Epoch 2/5\n",
  1354. "291/291 [==============================] - 1s 2ms/step - loss: 0.4934 - val_loss: 0.4401\n",
  1355. "Epoch 3/5\n",
  1356. "291/291 [==============================] - 1s 2ms/step - loss: 0.4101 - val_loss: 0.4346\n",
  1357. "Epoch 4/5\n",
  1358. "291/291 [==============================] - 1s 2ms/step - loss: 0.3935 - val_loss: 0.4130\n",
  1359. "Epoch 5/5\n",
  1360. "291/291 [==============================] - 1s 2ms/step - loss: 0.3857 - val_loss: 0.3897\n",
  1361. "73/73 [==============================] - 0s 1ms/step - loss: 0.3874\n",
  1362. "Epoch 1/5\n",
  1363. "291/291 [==============================] - 1s 2ms/step - loss: 0.9801 - val_loss: 1.3518\n",
  1364. "Epoch 2/5\n",
  1365. "291/291 [==============================] - 1s 2ms/step - loss: 1.8492 - val_loss: 0.5649\n",
  1366. "Epoch 3/5\n",
  1367. "291/291 [==============================] - 1s 2ms/step - loss: 0.4798 - val_loss: 0.5138\n",
  1368. "Epoch 4/5\n",
  1369. "291/291 [==============================] - 1s 2ms/step - loss: 0.4331 - val_loss: 0.4536\n",
  1370. "Epoch 5/5\n",
  1371. "291/291 [==============================] - 1s 2ms/step - loss: 0.4110 - val_loss: 0.4318\n",
  1372. "73/73 [==============================] - 0s 1ms/step - loss: 0.4307\n",
  1373. "Epoch 1/5\n",
  1374. "291/291 [==============================] - 1s 2ms/step - loss: 1.1531 - val_loss: 0.7210\n",
  1375. "Epoch 2/5\n",
  1376. "291/291 [==============================] - 1s 2ms/step - loss: 0.6263 - val_loss: 0.5434\n",
  1377. "Epoch 3/5\n",
  1378. "291/291 [==============================] - 1s 2ms/step - loss: 0.5054 - val_loss: 0.5116\n",
  1379. "Epoch 4/5\n",
  1380. "291/291 [==============================] - 1s 2ms/step - loss: 0.4522 - val_loss: 0.4736\n",
  1381. "Epoch 5/5\n",
  1382. "291/291 [==============================] - 1s 2ms/step - loss: 0.4431 - val_loss: 0.5277\n",
  1383. "73/73 [==============================] - 0s 1ms/step - loss: 0.4751\n",
  1384. "Epoch 1/5\n",
  1385. "291/291 [==============================] - 1s 2ms/step - loss: 5.6641 - val_loss: 4.8325\n",
  1386. "Epoch 2/5\n",
  1387. "291/291 [==============================] - 1s 2ms/step - loss: 3.8576 - val_loss: 3.2776\n",
  1388. "Epoch 3/5\n",
  1389. "291/291 [==============================] - 1s 2ms/step - loss: 2.6229 - val_loss: 2.2628\n",
  1390. "Epoch 4/5\n",
  1391. "291/291 [==============================] - 1s 2ms/step - loss: 1.8654 - val_loss: 1.6736\n",
  1392. "Epoch 5/5\n",
  1393. "291/291 [==============================] - 1s 2ms/step - loss: 1.4440 - val_loss: 1.3556\n",
  1394. "73/73 [==============================] - 0s 1ms/step - loss: 1.2508\n",
  1395. "Epoch 1/5\n",
  1396. "291/291 [==============================] - 1s 2ms/step - loss: 7.2170 - val_loss: 6.9118\n",
  1397. "Epoch 2/5\n",
  1398. "291/291 [==============================] - 1s 2ms/step - loss: 5.9878 - val_loss: 5.8670\n",
  1399. "Epoch 3/5\n",
  1400. "291/291 [==============================] - 1s 2ms/step - loss: 5.1511 - val_loss: 5.1158\n",
  1401. "Epoch 4/5\n"
  1402. ]
  1403. },
  1404. {
  1405. "name": "stdout",
  1406. "output_type": "stream",
  1407. "text": [
  1408. "291/291 [==============================] - 1s 2ms/step - loss: 4.5268 - val_loss: 4.5353\n",
  1409. "Epoch 5/5\n",
  1410. "291/291 [==============================] - 1s 2ms/step - loss: 4.0324 - val_loss: 4.0648\n",
  1411. "73/73 [==============================] - 0s 1ms/step - loss: 3.9163\n",
  1412. "Epoch 1/5\n",
  1413. "291/291 [==============================] - 1s 2ms/step - loss: 5.5210 - val_loss: 4.8195\n",
  1414. "Epoch 2/5\n",
  1415. "291/291 [==============================] - 1s 2ms/step - loss: 4.0962 - val_loss: 3.8202\n",
  1416. "Epoch 3/5\n",
  1417. "291/291 [==============================] - 1s 2ms/step - loss: 3.3504 - val_loss: 3.2475\n",
  1418. "Epoch 4/5\n",
  1419. "291/291 [==============================] - 1s 2ms/step - loss: 2.9146 - val_loss: 2.8860\n",
  1420. "Epoch 5/5\n",
  1421. "291/291 [==============================] - 1s 2ms/step - loss: 2.6333 - val_loss: 2.6334\n",
  1422. "73/73 [==============================] - 0s 1ms/step - loss: 2.5792\n",
  1423. "Epoch 1/5\n",
  1424. "291/291 [==============================] - 1s 2ms/step - loss: 5.0636 - val_loss: 4.9443\n",
  1425. "Epoch 2/5\n",
  1426. "291/291 [==============================] - 1s 3ms/step - loss: 4.5002 - val_loss: 4.3994\n",
  1427. "Epoch 3/5\n",
  1428. "291/291 [==============================] - 1s 2ms/step - loss: 4.0020 - val_loss: 3.9139\n",
  1429. "Epoch 4/5\n",
  1430. "291/291 [==============================] - 1s 2ms/step - loss: 3.5588 - val_loss: 3.4798\n",
  1431. "Epoch 5/5\n",
  1432. "291/291 [==============================] - 1s 2ms/step - loss: 3.1645 - val_loss: 3.0939\n",
  1433. "73/73 [==============================] - 0s 1ms/step - loss: 2.8371\n",
  1434. "Epoch 1/5\n",
  1435. "291/291 [==============================] - 1s 2ms/step - loss: 3.9923 - val_loss: 3.8633\n",
  1436. "Epoch 2/5\n",
  1437. "291/291 [==============================] - 1s 2ms/step - loss: 3.3281 - val_loss: 3.2841\n",
  1438. "Epoch 3/5\n",
  1439. "291/291 [==============================] - 1s 2ms/step - loss: 2.8468 - val_loss: 2.8545\n",
  1440. "Epoch 4/5\n",
  1441. "291/291 [==============================] - 1s 2ms/step - loss: 2.4866 - val_loss: 2.5283\n",
  1442. "Epoch 5/5\n",
  1443. "291/291 [==============================] - 1s 2ms/step - loss: 2.2078 - val_loss: 2.2688\n",
  1444. "73/73 [==============================] - 0s 2ms/step - loss: 2.1177\n",
  1445. "Epoch 1/5\n",
  1446. "291/291 [==============================] - 1s 2ms/step - loss: 4.5277 - val_loss: 4.5153\n",
  1447. "Epoch 2/5\n",
  1448. "291/291 [==============================] - 1s 2ms/step - loss: 4.1607 - val_loss: 4.1431\n",
  1449. "Epoch 3/5\n",
  1450. "291/291 [==============================] - 1s 2ms/step - loss: 3.8010 - val_loss: 3.7765\n",
  1451. "Epoch 4/5\n",
  1452. "291/291 [==============================] - 1s 2ms/step - loss: 3.4551 - val_loss: 3.4328\n",
  1453. "Epoch 5/5\n",
  1454. "291/291 [==============================] - 1s 2ms/step - loss: 3.1402 - val_loss: 3.1273\n",
  1455. "73/73 [==============================] - 0s 1ms/step - loss: 2.8686\n",
  1456. "Epoch 1/5\n",
  1457. "291/291 [==============================] - 1s 2ms/step - loss: 4.7848 - val_loss: 4.8538\n",
  1458. "Epoch 2/5\n",
  1459. "291/291 [==============================] - 1s 2ms/step - loss: 4.4333 - val_loss: 4.5093\n",
  1460. "Epoch 3/5\n",
  1461. "291/291 [==============================] - 1s 2ms/step - loss: 4.1206 - val_loss: 4.2002\n",
  1462. "Epoch 4/5\n",
  1463. "291/291 [==============================] - 1s 2ms/step - loss: 3.8409 - val_loss: 3.9234\n",
  1464. "Epoch 5/5\n",
  1465. "291/291 [==============================] - 1s 2ms/step - loss: 3.5894 - val_loss: 3.6717\n",
  1466. "73/73 [==============================] - 0s 1ms/step - loss: 3.5211\n",
  1467. "Epoch 1/5\n",
  1468. "291/291 [==============================] - 1s 4ms/step - loss: 9.1569 - val_loss: 8.6366\n",
  1469. "Epoch 2/5\n",
  1470. "291/291 [==============================] - 1s 2ms/step - loss: 7.5285 - val_loss: 7.2997\n",
  1471. "Epoch 3/5\n",
  1472. "291/291 [==============================] - 1s 2ms/step - loss: 6.4347 - val_loss: 6.3497\n",
  1473. "Epoch 4/5\n",
  1474. "291/291 [==============================] - 1s 2ms/step - loss: 5.6371 - val_loss: 5.6331\n",
  1475. "Epoch 5/5\n",
  1476. "291/291 [==============================] - 1s 3ms/step - loss: 5.0232 - val_loss: 5.0635\n",
  1477. "73/73 [==============================] - 0s 1ms/step - loss: 4.8631\n",
  1478. "Epoch 1/5\n",
  1479. "291/291 [==============================] - 1s 3ms/step - loss: 6.3648 - val_loss: 6.4245\n",
  1480. "Epoch 2/5\n",
  1481. "291/291 [==============================] - 1s 3ms/step - loss: 5.8497 - val_loss: 5.9268\n",
  1482. "Epoch 3/5\n",
  1483. "291/291 [==============================] - 1s 2ms/step - loss: 5.4040 - val_loss: 5.4928\n",
  1484. "Epoch 4/5\n",
  1485. "291/291 [==============================] - 1s 3ms/step - loss: 5.0078 - val_loss: 5.1023\n",
  1486. "Epoch 5/5\n",
  1487. "291/291 [==============================] - 1s 2ms/step - loss: 4.6443 - val_loss: 4.7369\n",
  1488. "73/73 [==============================] - 0s 2ms/step - loss: 4.4073\n",
  1489. "Epoch 1/5\n",
  1490. "291/291 [==============================] - 1s 3ms/step - loss: 5.1593 - val_loss: 5.2022\n",
  1491. "Epoch 2/5\n",
  1492. "291/291 [==============================] - 1s 3ms/step - loss: 4.8037 - val_loss: 4.8456\n",
  1493. "Epoch 3/5\n",
  1494. "291/291 [==============================] - 1s 2ms/step - loss: 4.4718 - val_loss: 4.5103\n",
  1495. "Epoch 4/5\n",
  1496. "291/291 [==============================] - 1s 2ms/step - loss: 4.1593 - val_loss: 4.1932\n",
  1497. "Epoch 5/5\n",
  1498. "291/291 [==============================] - 1s 2ms/step - loss: 3.8634 - val_loss: 3.8906\n",
  1499. "73/73 [==============================] - 0s 1ms/step - loss: 3.8143\n",
  1500. "Epoch 1/5\n",
  1501. "291/291 [==============================] - 1s 2ms/step - loss: 2.6233 - val_loss: 1.5174\n",
  1502. "Epoch 2/5\n",
  1503. "291/291 [==============================] - 1s 3ms/step - loss: 1.1822 - val_loss: 1.1096\n",
  1504. "Epoch 3/5\n",
  1505. "291/291 [==============================] - 0s 2ms/step - loss: 0.9414 - val_loss: 0.9299\n",
  1506. "Epoch 4/5\n",
  1507. "291/291 [==============================] - 0s 2ms/step - loss: 0.8175 - val_loss: 0.8322\n",
  1508. "Epoch 5/5\n",
  1509. "291/291 [==============================] - 0s 2ms/step - loss: 0.7501 - val_loss: 0.7778\n",
  1510. "73/73 [==============================] - 0s 1ms/step - loss: 0.6963\n",
  1511. "Epoch 1/5\n",
  1512. "291/291 [==============================] - 1s 2ms/step - loss: 4.1645 - val_loss: 2.6877\n",
  1513. "Epoch 2/5\n",
  1514. "291/291 [==============================] - 0s 2ms/step - loss: 1.9671 - val_loss: 1.7414\n",
  1515. "Epoch 3/5\n",
  1516. "291/291 [==============================] - 1s 3ms/step - loss: 1.4915 - val_loss: 1.4736\n",
  1517. "Epoch 4/5\n",
  1518. "291/291 [==============================] - 1s 2ms/step - loss: 1.3302 - val_loss: 1.3518\n",
  1519. "Epoch 5/5\n",
  1520. "291/291 [==============================] - 0s 2ms/step - loss: 1.2437 - val_loss: 1.2727\n",
  1521. "73/73 [==============================] - 0s 1ms/step - loss: 1.2184\n",
  1522. "Epoch 1/5\n",
  1523. "291/291 [==============================] - 0s 1ms/step - loss: 3.2925 - val_loss: 1.8886\n",
  1524. "Epoch 2/5\n",
  1525. "291/291 [==============================] - 0s 1ms/step - loss: 1.3801 - val_loss: 1.2669\n",
  1526. "Epoch 3/5\n",
  1527. "291/291 [==============================] - 0s 1ms/step - loss: 1.0121 - val_loss: 0.9720\n",
  1528. "Epoch 4/5\n",
  1529. "291/291 [==============================] - 0s 1ms/step - loss: 0.8040 - val_loss: 0.7939\n",
  1530. "Epoch 5/5\n",
  1531. "291/291 [==============================] - 0s 1ms/step - loss: 0.6898 - val_loss: 0.7023\n",
  1532. "73/73 [==============================] - 0s 741us/step - loss: 0.6630\n",
  1533. "Epoch 1/5\n",
  1534. "291/291 [==============================] - 0s 1ms/step - loss: 2.1169 - val_loss: 1.2152\n",
  1535. "Epoch 2/5\n",
  1536. "291/291 [==============================] - 0s 1ms/step - loss: 0.9299 - val_loss: 0.8905\n",
  1537. "Epoch 3/5\n",
  1538. "291/291 [==============================] - 0s 1ms/step - loss: 0.7871 - val_loss: 0.7966\n",
  1539. "Epoch 4/5\n",
  1540. "291/291 [==============================] - 0s 1ms/step - loss: 0.7281 - val_loss: 0.7472\n",
  1541. "Epoch 5/5\n",
  1542. "291/291 [==============================] - 0s 1ms/step - loss: 0.6913 - val_loss: 0.7120\n",
  1543. "73/73 [==============================] - 0s 998us/step - loss: 0.7615\n",
  1544. "Epoch 1/5\n",
  1545. "291/291 [==============================] - 0s 1ms/step - loss: 2.4891 - val_loss: 1.2539\n",
  1546. "Epoch 2/5\n",
  1547. "291/291 [==============================] - 0s 1ms/step - loss: 0.9480 - val_loss: 0.8004\n",
  1548. "Epoch 3/5\n",
  1549. "291/291 [==============================] - 0s 1ms/step - loss: 0.7368 - val_loss: 0.7300\n",
  1550. "Epoch 4/5\n",
  1551. "291/291 [==============================] - 0s 1ms/step - loss: 0.6747 - val_loss: 0.6932\n",
  1552. "Epoch 5/5\n",
  1553. "291/291 [==============================] - 0s 1ms/step - loss: 0.6403 - val_loss: 0.6685\n",
  1554. "73/73 [==============================] - 0s 887us/step - loss: 0.6678\n",
  1555. "Epoch 1/5\n",
  1556. "291/291 [==============================] - 0s 1ms/step - loss: 1.4230 - val_loss: 0.9523\n",
  1557. "Epoch 2/5\n",
  1558. "291/291 [==============================] - 0s 1ms/step - loss: 0.7410 - val_loss: 0.6910\n",
  1559. "Epoch 3/5\n",
  1560. "291/291 [==============================] - 0s 1ms/step - loss: 0.6234 - val_loss: 0.6393\n",
  1561. "Epoch 4/5\n",
  1562. "291/291 [==============================] - 1s 2ms/step - loss: 0.5747 - val_loss: 0.5845\n",
  1563. "Epoch 5/5\n",
  1564. "291/291 [==============================] - 0s 1ms/step - loss: 0.5367 - val_loss: 0.5476\n",
  1565. "73/73 [==============================] - 0s 890us/step - loss: 0.4781\n",
  1566. "Epoch 1/5\n"
  1567. ]
  1568. },
  1569. {
  1570. "name": "stdout",
  1571. "output_type": "stream",
  1572. "text": [
  1573. "291/291 [==============================] - 0s 1ms/step - loss: 1.0420 - val_loss: 0.6764\n",
  1574. "Epoch 2/5\n",
  1575. "291/291 [==============================] - 0s 1ms/step - loss: 0.6167 - val_loss: 0.5992\n",
  1576. "Epoch 3/5\n",
  1577. "291/291 [==============================] - 0s 1ms/step - loss: 0.5512 - val_loss: 0.5490\n",
  1578. "Epoch 4/5\n",
  1579. "291/291 [==============================] - 0s 1ms/step - loss: 0.5034 - val_loss: 0.5085\n",
  1580. "Epoch 5/5\n",
  1581. "291/291 [==============================] - 0s 1ms/step - loss: 0.4736 - val_loss: 0.4957\n",
  1582. "73/73 [==============================] - 0s 776us/step - loss: 0.4760\n",
  1583. "Epoch 1/5\n",
  1584. "291/291 [==============================] - 0s 1ms/step - loss: 2.0855 - val_loss: 1.4162\n",
  1585. "Epoch 2/5\n",
  1586. "291/291 [==============================] - 0s 1ms/step - loss: 1.2415 - val_loss: 1.2320\n",
  1587. "Epoch 3/5\n",
  1588. "291/291 [==============================] - 0s 1ms/step - loss: 1.0421 - val_loss: 1.0364\n",
  1589. "Epoch 4/5\n",
  1590. "291/291 [==============================] - 0s 1ms/step - loss: 0.8814 - val_loss: 0.8794\n",
  1591. "Epoch 5/5\n",
  1592. "291/291 [==============================] - 0s 1ms/step - loss: 0.7793 - val_loss: 0.7572\n",
  1593. "73/73 [==============================] - 0s 731us/step - loss: 0.7009\n",
  1594. "Epoch 1/5\n",
  1595. "291/291 [==============================] - 0s 1ms/step - loss: 1.0203 - val_loss: 0.7237\n",
  1596. "Epoch 2/5\n",
  1597. "291/291 [==============================] - 0s 1ms/step - loss: 0.6118 - val_loss: 0.6342\n",
  1598. "Epoch 3/5\n",
  1599. "291/291 [==============================] - 0s 1ms/step - loss: 0.5551 - val_loss: 0.5826\n",
  1600. "Epoch 4/5\n",
  1601. "291/291 [==============================] - 0s 1ms/step - loss: 0.5133 - val_loss: 0.5398\n",
  1602. "Epoch 5/5\n",
  1603. "291/291 [==============================] - 0s 1ms/step - loss: 0.4800 - val_loss: 0.5091\n",
  1604. "73/73 [==============================] - 0s 902us/step - loss: 0.4912\n",
  1605. "Epoch 1/5\n",
  1606. "291/291 [==============================] - 0s 1ms/step - loss: 1.8002 - val_loss: 1.0841\n",
  1607. "Epoch 2/5\n",
  1608. "291/291 [==============================] - 0s 1ms/step - loss: 0.8644 - val_loss: 0.8159\n",
  1609. "Epoch 3/5\n",
  1610. "291/291 [==============================] - 0s 1ms/step - loss: 0.7184 - val_loss: 0.7334\n",
  1611. "Epoch 4/5\n",
  1612. "291/291 [==============================] - 0s 1ms/step - loss: 0.6618 - val_loss: 0.6931\n",
  1613. "Epoch 5/5\n",
  1614. "291/291 [==============================] - 0s 1ms/step - loss: 0.6270 - val_loss: 0.6700\n",
  1615. "73/73 [==============================] - 0s 741us/step - loss: 0.6656\n",
  1616. "Epoch 1/5\n",
  1617. "291/291 [==============================] - 0s 1ms/step - loss: 0.9480 - val_loss: 0.6132\n",
  1618. "Epoch 2/5\n",
  1619. "291/291 [==============================] - 0s 1ms/step - loss: 0.5560 - val_loss: 0.5532\n",
  1620. "Epoch 3/5\n",
  1621. "291/291 [==============================] - 0s 1ms/step - loss: 0.5083 - val_loss: 0.5110\n",
  1622. "Epoch 4/5\n",
  1623. "291/291 [==============================] - 0s 1ms/step - loss: 0.4791 - val_loss: 0.4829\n",
  1624. "Epoch 5/5\n",
  1625. "291/291 [==============================] - 0s 1ms/step - loss: 0.4592 - val_loss: 0.4626\n",
  1626. "73/73 [==============================] - 0s 776us/step - loss: 0.4163\n",
  1627. "Epoch 1/5\n",
  1628. "291/291 [==============================] - 0s 1ms/step - loss: 1.1311 - val_loss: 0.6862\n",
  1629. "Epoch 2/5\n",
  1630. "291/291 [==============================] - 0s 1ms/step - loss: 0.6238 - val_loss: 0.6058\n",
  1631. "Epoch 3/5\n",
  1632. "291/291 [==============================] - 0s 1ms/step - loss: 0.5496 - val_loss: 0.5516\n",
  1633. "Epoch 4/5\n",
  1634. "291/291 [==============================] - 0s 1ms/step - loss: 0.5094 - val_loss: 0.5190\n",
  1635. "Epoch 5/5\n",
  1636. "291/291 [==============================] - 0s 1ms/step - loss: 0.4807 - val_loss: 0.4944\n",
  1637. "73/73 [==============================] - 0s 750us/step - loss: 0.4804\n",
  1638. "Epoch 1/5\n",
  1639. "291/291 [==============================] - 0s 1ms/step - loss: 1.1722 - val_loss: 0.6078\n",
  1640. "Epoch 2/5\n",
  1641. "291/291 [==============================] - 0s 1ms/step - loss: 0.5294 - val_loss: 0.5360\n",
  1642. "Epoch 3/5\n",
  1643. "291/291 [==============================] - 0s 1ms/step - loss: 0.4759 - val_loss: 0.4791\n",
  1644. "Epoch 4/5\n",
  1645. "291/291 [==============================] - 0s 1ms/step - loss: 0.4560 - val_loss: 0.4741\n",
  1646. "Epoch 5/5\n",
  1647. "291/291 [==============================] - 0s 1ms/step - loss: 0.4444 - val_loss: 0.4566\n",
  1648. "73/73 [==============================] - 0s 769us/step - loss: 0.4447\n",
  1649. "Epoch 1/5\n",
  1650. "291/291 [==============================] - 0s 1ms/step - loss: 1.3475 - val_loss: 0.9600\n",
  1651. "Epoch 2/5\n",
  1652. "291/291 [==============================] - 0s 1ms/step - loss: 0.8352 - val_loss: 0.8075\n",
  1653. "Epoch 3/5\n",
  1654. "291/291 [==============================] - 0s 1ms/step - loss: 0.7201 - val_loss: 0.7440\n",
  1655. "Epoch 4/5\n",
  1656. "291/291 [==============================] - 0s 1ms/step - loss: 0.6635 - val_loss: 0.6942\n",
  1657. "Epoch 5/5\n",
  1658. "291/291 [==============================] - 0s 1ms/step - loss: 0.6155 - val_loss: 0.6405\n",
  1659. "73/73 [==============================] - 0s 701us/step - loss: 0.6084\n",
  1660. "Epoch 1/5\n",
  1661. "291/291 [==============================] - 0s 1ms/step - loss: 0.8615 - val_loss: 0.6151\n",
  1662. "Epoch 2/5\n",
  1663. "291/291 [==============================] - 0s 1ms/step - loss: 0.5342 - val_loss: 0.5509\n",
  1664. "Epoch 3/5\n",
  1665. "291/291 [==============================] - 0s 1ms/step - loss: 0.4822 - val_loss: 0.4939\n",
  1666. "Epoch 4/5\n",
  1667. "291/291 [==============================] - 0s 1ms/step - loss: 0.4543 - val_loss: 0.4626\n",
  1668. "Epoch 5/5\n",
  1669. "291/291 [==============================] - 0s 1ms/step - loss: 0.4327 - val_loss: 0.4645\n",
  1670. "73/73 [==============================] - 0s 776us/step - loss: 0.4588\n",
  1671. "Epoch 1/5\n",
  1672. "291/291 [==============================] - 0s 1ms/step - loss: 6.1586 - val_loss: 5.3790\n",
  1673. "Epoch 2/5\n",
  1674. "291/291 [==============================] - 0s 1ms/step - loss: 4.5237 - val_loss: 4.0776\n",
  1675. "Epoch 3/5\n",
  1676. "291/291 [==============================] - 0s 1ms/step - loss: 3.5179 - val_loss: 3.2189\n",
  1677. "Epoch 4/5\n",
  1678. "291/291 [==============================] - 0s 1ms/step - loss: 2.8315 - val_loss: 2.6213\n",
  1679. "Epoch 5/5\n",
  1680. "291/291 [==============================] - 0s 1ms/step - loss: 2.3441 - val_loss: 2.1926\n",
  1681. "73/73 [==============================] - 0s 705us/step - loss: 1.9314\n",
  1682. "Epoch 1/5\n",
  1683. "291/291 [==============================] - 0s 1ms/step - loss: 7.8600 - val_loss: 6.4607\n",
  1684. "Epoch 2/5\n",
  1685. "291/291 [==============================] - 0s 1ms/step - loss: 5.5827 - val_loss: 4.9671\n",
  1686. "Epoch 3/5\n",
  1687. "291/291 [==============================] - 0s 1ms/step - loss: 4.3741 - val_loss: 3.9834\n",
  1688. "Epoch 4/5\n",
  1689. "291/291 [==============================] - 0s 1ms/step - loss: 3.5144 - val_loss: 3.2495\n",
  1690. "Epoch 5/5\n",
  1691. "291/291 [==============================] - 0s 1ms/step - loss: 2.8621 - val_loss: 2.6708\n",
  1692. "73/73 [==============================] - 0s 712us/step - loss: 2.6098\n",
  1693. "Epoch 1/5\n",
  1694. "291/291 [==============================] - 0s 1ms/step - loss: 7.8475 - val_loss: 6.9182\n",
  1695. "Epoch 2/5\n",
  1696. "291/291 [==============================] - 0s 1ms/step - loss: 5.6535 - val_loss: 5.2722\n",
  1697. "Epoch 3/5\n",
  1698. "291/291 [==============================] - 0s 1ms/step - loss: 4.4071 - val_loss: 4.2072\n",
  1699. "Epoch 4/5\n",
  1700. "291/291 [==============================] - 0s 1ms/step - loss: 3.5414 - val_loss: 3.4290\n",
  1701. "Epoch 5/5\n",
  1702. "291/291 [==============================] - 1s 2ms/step - loss: 2.8906 - val_loss: 2.8284\n",
  1703. "73/73 [==============================] - 0s 734us/step - loss: 2.6736\n",
  1704. "Epoch 1/5\n",
  1705. "291/291 [==============================] - 0s 1ms/step - loss: 7.7978 - val_loss: 6.8712\n",
  1706. "Epoch 2/5\n",
  1707. "291/291 [==============================] - 0s 1ms/step - loss: 5.4940 - val_loss: 5.0133\n",
  1708. "Epoch 3/5\n",
  1709. "291/291 [==============================] - 0s 1ms/step - loss: 4.2015 - val_loss: 3.9064\n",
  1710. "Epoch 4/5\n",
  1711. "291/291 [==============================] - 0s 1ms/step - loss: 3.3682 - val_loss: 3.1955\n",
  1712. "Epoch 5/5\n",
  1713. "291/291 [==============================] - 0s 1ms/step - loss: 2.8020 - val_loss: 2.7297\n",
  1714. "73/73 [==============================] - 0s 743us/step - loss: 2.4731\n",
  1715. "Epoch 1/5\n",
  1716. "291/291 [==============================] - 0s 1ms/step - loss: 5.0610 - val_loss: 4.8365\n",
  1717. "Epoch 2/5\n",
  1718. "291/291 [==============================] - 0s 1ms/step - loss: 4.2445 - val_loss: 4.0615\n",
  1719. "Epoch 3/5\n",
  1720. "291/291 [==============================] - 0s 1ms/step - loss: 3.5845 - val_loss: 3.4273\n",
  1721. "Epoch 4/5\n",
  1722. "291/291 [==============================] - 0s 1ms/step - loss: 3.0383 - val_loss: 2.8955\n",
  1723. "Epoch 5/5\n",
  1724. "291/291 [==============================] - 0s 1ms/step - loss: 2.5821 - val_loss: 2.4540\n",
  1725. "73/73 [==============================] - 0s 795us/step - loss: 2.4359\n",
  1726. "Epoch 1/5\n",
  1727. "291/291 [==============================] - 0s 1ms/step - loss: 5.0115 - val_loss: 4.9367\n",
  1728. "Epoch 2/5\n",
  1729. "291/291 [==============================] - 0s 1ms/step - loss: 4.5337 - val_loss: 4.4908\n",
  1730. "Epoch 3/5\n",
  1731. "291/291 [==============================] - 0s 1ms/step - loss: 4.1286 - val_loss: 4.1078\n"
  1732. ]
  1733. },
  1734. {
  1735. "name": "stdout",
  1736. "output_type": "stream",
  1737. "text": [
  1738. "Epoch 4/5\n",
  1739. "291/291 [==============================] - 0s 1ms/step - loss: 3.7801 - val_loss: 3.7770\n",
  1740. "Epoch 5/5\n",
  1741. "291/291 [==============================] - 0s 1ms/step - loss: 3.4782 - val_loss: 3.4883\n",
  1742. "73/73 [==============================] - 0s 759us/step - loss: 3.2049\n",
  1743. "Epoch 1/5\n",
  1744. "291/291 [==============================] - 0s 1ms/step - loss: 4.3891 - val_loss: 4.3838\n",
  1745. "Epoch 2/5\n",
  1746. "291/291 [==============================] - 0s 1ms/step - loss: 3.9677 - val_loss: 3.9649\n",
  1747. "Epoch 3/5\n",
  1748. "291/291 [==============================] - 0s 1ms/step - loss: 3.5987 - val_loss: 3.5919\n",
  1749. "Epoch 4/5\n",
  1750. "291/291 [==============================] - 0s 1ms/step - loss: 3.2732 - val_loss: 3.2638\n",
  1751. "Epoch 5/5\n",
  1752. "291/291 [==============================] - 0s 1ms/step - loss: 2.9879 - val_loss: 2.9737\n",
  1753. "73/73 [==============================] - 0s 756us/step - loss: 2.8622\n",
  1754. "Epoch 1/5\n",
  1755. "291/291 [==============================] - 0s 1ms/step - loss: 5.5032 - val_loss: 5.3251\n",
  1756. "Epoch 2/5\n",
  1757. "291/291 [==============================] - 0s 1ms/step - loss: 4.7847 - val_loss: 4.6464\n",
  1758. "Epoch 3/5\n",
  1759. "291/291 [==============================] - 0s 1ms/step - loss: 4.2243 - val_loss: 4.1080\n",
  1760. "Epoch 4/5\n",
  1761. "291/291 [==============================] - 0s 1ms/step - loss: 3.7831 - val_loss: 3.6825\n",
  1762. "Epoch 5/5\n",
  1763. "291/291 [==============================] - 0s 1ms/step - loss: 3.4300 - val_loss: 3.3364\n",
  1764. "73/73 [==============================] - 0s 741us/step - loss: 3.2063\n",
  1765. "Epoch 1/5\n",
  1766. "291/291 [==============================] - 0s 1ms/step - loss: 4.3948 - val_loss: 4.2735\n",
  1767. "Epoch 2/5\n",
  1768. "291/291 [==============================] - 0s 1ms/step - loss: 3.8824 - val_loss: 3.7927\n",
  1769. "Epoch 3/5\n",
  1770. "291/291 [==============================] - 0s 1ms/step - loss: 3.4405 - val_loss: 3.3761\n",
  1771. "Epoch 4/5\n",
  1772. "291/291 [==============================] - 0s 1ms/step - loss: 3.0552 - val_loss: 3.0141\n",
  1773. "Epoch 5/5\n",
  1774. "291/291 [==============================] - 0s 1ms/step - loss: 2.7174 - val_loss: 2.6972\n",
  1775. "73/73 [==============================] - 0s 745us/step - loss: 2.5201\n",
  1776. "Epoch 1/5\n",
  1777. "291/291 [==============================] - 0s 1ms/step - loss: 5.9653 - val_loss: 5.8882\n",
  1778. "Epoch 2/5\n",
  1779. "291/291 [==============================] - 0s 1ms/step - loss: 5.2793 - val_loss: 5.2471\n",
  1780. "Epoch 3/5\n",
  1781. "291/291 [==============================] - 0s 1ms/step - loss: 4.7084 - val_loss: 4.7055\n",
  1782. "Epoch 4/5\n",
  1783. "291/291 [==============================] - 0s 1ms/step - loss: 4.2204 - val_loss: 4.2358\n",
  1784. "Epoch 5/5\n",
  1785. "291/291 [==============================] - 0s 1ms/step - loss: 3.7954 - val_loss: 3.8254\n",
  1786. "73/73 [==============================] - 0s 749us/step - loss: 3.7291\n",
  1787. "Epoch 1/5\n",
  1788. "291/291 [==============================] - 0s 1ms/step - loss: 1.9685 - val_loss: 1.1124\n",
  1789. "Epoch 2/5\n",
  1790. "291/291 [==============================] - 0s 1ms/step - loss: 0.9086 - val_loss: 0.8290\n",
  1791. "Epoch 3/5\n",
  1792. "291/291 [==============================] - 0s 1ms/step - loss: 0.7516 - val_loss: 0.7621\n",
  1793. "Epoch 4/5\n",
  1794. "291/291 [==============================] - 0s 1ms/step - loss: 0.6977 - val_loss: 0.7265\n",
  1795. "Epoch 5/5\n",
  1796. "291/291 [==============================] - 0s 1ms/step - loss: 0.6659 - val_loss: 0.6921\n",
  1797. "73/73 [==============================] - 0s 742us/step - loss: 0.6005\n",
  1798. "Epoch 1/5\n",
  1799. "291/291 [==============================] - 0s 1ms/step - loss: 3.1963 - val_loss: 1.5580\n",
  1800. "Epoch 2/5\n",
  1801. "291/291 [==============================] - 0s 1ms/step - loss: 1.0259 - val_loss: 0.8754\n",
  1802. "Epoch 3/5\n",
  1803. "291/291 [==============================] - 0s 1ms/step - loss: 0.7683 - val_loss: 0.7825\n",
  1804. "Epoch 4/5\n",
  1805. "291/291 [==============================] - 0s 1ms/step - loss: 0.7145 - val_loss: 0.7427\n",
  1806. "Epoch 5/5\n",
  1807. "291/291 [==============================] - 0s 1ms/step - loss: 0.6839 - val_loss: 0.7145\n",
  1808. "73/73 [==============================] - 0s 745us/step - loss: 0.6608\n",
  1809. "Epoch 1/5\n",
  1810. "291/291 [==============================] - 0s 1ms/step - loss: 2.3852 - val_loss: 1.2502\n",
  1811. "Epoch 2/5\n",
  1812. "291/291 [==============================] - 0s 1ms/step - loss: 0.9576 - val_loss: 0.8568\n",
  1813. "Epoch 3/5\n",
  1814. "291/291 [==============================] - 0s 1ms/step - loss: 0.7460 - val_loss: 0.7411\n",
  1815. "Epoch 4/5\n",
  1816. "291/291 [==============================] - 0s 1ms/step - loss: 0.6740 - val_loss: 0.6844\n",
  1817. "Epoch 5/5\n",
  1818. "291/291 [==============================] - 0s 1ms/step - loss: 0.6355 - val_loss: 0.6534\n",
  1819. "73/73 [==============================] - 0s 678us/step - loss: 0.6334\n",
  1820. "Epoch 1/5\n",
  1821. "291/291 [==============================] - 0s 1ms/step - loss: 2.1238 - val_loss: 1.2594\n",
  1822. "Epoch 2/5\n",
  1823. "291/291 [==============================] - 0s 1ms/step - loss: 1.0309 - val_loss: 0.9895\n",
  1824. "Epoch 3/5\n",
  1825. "291/291 [==============================] - 0s 1ms/step - loss: 0.8577 - val_loss: 0.8801\n",
  1826. "Epoch 4/5\n",
  1827. "291/291 [==============================] - 0s 1ms/step - loss: 0.7830 - val_loss: 0.8265\n",
  1828. "Epoch 5/5\n",
  1829. "291/291 [==============================] - 0s 1ms/step - loss: 0.7413 - val_loss: 0.7910\n",
  1830. "73/73 [==============================] - 0s 755us/step - loss: 0.7701\n",
  1831. "Epoch 1/5\n",
  1832. "291/291 [==============================] - 0s 1ms/step - loss: 2.5229 - val_loss: 1.3573\n",
  1833. "Epoch 2/5\n",
  1834. "291/291 [==============================] - 0s 1ms/step - loss: 1.1336 - val_loss: 0.9946\n",
  1835. "Epoch 3/5\n",
  1836. "291/291 [==============================] - 0s 1ms/step - loss: 0.8970 - val_loss: 0.8638\n",
  1837. "Epoch 4/5\n",
  1838. "291/291 [==============================] - 0s 1ms/step - loss: 0.7955 - val_loss: 0.8096\n",
  1839. "Epoch 5/5\n",
  1840. "291/291 [==============================] - 0s 1ms/step - loss: 0.7456 - val_loss: 0.7789\n",
  1841. "73/73 [==============================] - 0s 773us/step - loss: 0.7643\n",
  1842. "Epoch 1/5\n",
  1843. "291/291 [==============================] - 0s 1ms/step - loss: 1.2120 - val_loss: 0.7290\n",
  1844. "Epoch 2/5\n",
  1845. "291/291 [==============================] - 0s 1ms/step - loss: 0.6061 - val_loss: 0.5779\n",
  1846. "Epoch 3/5\n",
  1847. "291/291 [==============================] - 0s 1ms/step - loss: 0.5221 - val_loss: 0.5214\n",
  1848. "Epoch 4/5\n",
  1849. "291/291 [==============================] - 0s 1ms/step - loss: 0.4830 - val_loss: 0.4880\n",
  1850. "Epoch 5/5\n",
  1851. "291/291 [==============================] - 0s 1ms/step - loss: 0.4586 - val_loss: 0.4659\n",
  1852. "73/73 [==============================] - 0s 794us/step - loss: 0.4235\n",
  1853. "Epoch 1/5\n",
  1854. "291/291 [==============================] - 0s 1ms/step - loss: 1.2511 - val_loss: 0.9892\n",
  1855. "Epoch 2/5\n",
  1856. "291/291 [==============================] - 0s 1ms/step - loss: 0.8195 - val_loss: 0.6624\n",
  1857. "Epoch 3/5\n",
  1858. "291/291 [==============================] - 0s 1ms/step - loss: 0.5581 - val_loss: 0.5669\n",
  1859. "Epoch 4/5\n",
  1860. "291/291 [==============================] - 0s 1ms/step - loss: 0.5078 - val_loss: 0.5251\n",
  1861. "Epoch 5/5\n",
  1862. "291/291 [==============================] - 0s 1ms/step - loss: 0.4784 - val_loss: 0.5039\n",
  1863. "73/73 [==============================] - 0s 782us/step - loss: 0.4810\n",
  1864. "Epoch 1/5\n",
  1865. "291/291 [==============================] - 0s 1ms/step - loss: 0.9067 - val_loss: 0.6472\n",
  1866. "Epoch 2/5\n",
  1867. "291/291 [==============================] - 0s 1ms/step - loss: 0.5555 - val_loss: 0.5485\n",
  1868. "Epoch 3/5\n",
  1869. "291/291 [==============================] - 0s 1ms/step - loss: 0.4997 - val_loss: 0.5108\n",
  1870. "Epoch 4/5\n",
  1871. "291/291 [==============================] - 0s 1ms/step - loss: 0.4699 - val_loss: 0.4810\n",
  1872. "Epoch 5/5\n",
  1873. "291/291 [==============================] - 0s 1ms/step - loss: 0.4597 - val_loss: 0.4688\n",
  1874. "73/73 [==============================] - 0s 773us/step - loss: 0.4569\n",
  1875. "Epoch 1/5\n",
  1876. "291/291 [==============================] - 0s 1ms/step - loss: 1.2706 - val_loss: 0.6720\n",
  1877. "Epoch 2/5\n",
  1878. "291/291 [==============================] - 0s 1ms/step - loss: 0.5706 - val_loss: 0.5938\n",
  1879. "Epoch 3/5\n",
  1880. "291/291 [==============================] - 0s 1ms/step - loss: 0.5258 - val_loss: 0.5550\n",
  1881. "Epoch 4/5\n",
  1882. "291/291 [==============================] - 0s 1ms/step - loss: 0.4977 - val_loss: 0.5244\n",
  1883. "Epoch 5/5\n",
  1884. "291/291 [==============================] - 0s 1ms/step - loss: 0.4749 - val_loss: 0.5032\n",
  1885. "73/73 [==============================] - 0s 737us/step - loss: 0.5040\n",
  1886. "Epoch 1/5\n",
  1887. "291/291 [==============================] - 0s 1ms/step - loss: 1.2697 - val_loss: 0.6926\n",
  1888. "Epoch 2/5\n",
  1889. "291/291 [==============================] - 0s 1ms/step - loss: 0.5834 - val_loss: 0.6005\n",
  1890. "Epoch 3/5\n",
  1891. "291/291 [==============================] - 0s 1ms/step - loss: 0.5354 - val_loss: 0.5558\n",
  1892. "Epoch 4/5\n",
  1893. "291/291 [==============================] - 0s 1ms/step - loss: 0.5024 - val_loss: 0.5205\n",
  1894. "Epoch 5/5\n",
  1895. "291/291 [==============================] - 0s 1ms/step - loss: 0.4770 - val_loss: 0.4957\n",
  1896. "73/73 [==============================] - 0s 813us/step - loss: 0.4989\n",
  1897. "Epoch 1/5\n"
  1898. ]
  1899. },
  1900. {
  1901. "name": "stdout",
  1902. "output_type": "stream",
  1903. "text": [
  1904. "291/291 [==============================] - 0s 1ms/step - loss: 0.8114 - val_loss: 0.5644\n",
  1905. "Epoch 2/5\n",
  1906. "291/291 [==============================] - 0s 1ms/step - loss: 0.4998 - val_loss: 0.4941\n",
  1907. "Epoch 3/5\n",
  1908. "291/291 [==============================] - 0s 1ms/step - loss: 0.4645 - val_loss: 0.4577\n",
  1909. "Epoch 4/5\n",
  1910. "291/291 [==============================] - 0s 1ms/step - loss: 0.4620 - val_loss: 0.4887\n",
  1911. "Epoch 5/5\n",
  1912. "291/291 [==============================] - 0s 1ms/step - loss: 0.5371 - val_loss: 0.4676\n",
  1913. "73/73 [==============================] - 0s 785us/step - loss: 0.4544\n",
  1914. "Epoch 1/5\n",
  1915. "291/291 [==============================] - 0s 1ms/step - loss: 0.9807 - val_loss: 0.6228\n",
  1916. "Epoch 2/5\n",
  1917. "291/291 [==============================] - 0s 1ms/step - loss: 0.5320 - val_loss: 0.5100\n",
  1918. "Epoch 3/5\n",
  1919. "291/291 [==============================] - 0s 1ms/step - loss: 0.4643 - val_loss: 0.4632\n",
  1920. "Epoch 4/5\n",
  1921. "291/291 [==============================] - 0s 1ms/step - loss: 0.4374 - val_loss: 0.4405\n",
  1922. "Epoch 5/5\n",
  1923. "291/291 [==============================] - 0s 1ms/step - loss: 0.4216 - val_loss: 0.4283\n",
  1924. "73/73 [==============================] - 0s 795us/step - loss: 0.4126\n",
  1925. "Epoch 1/5\n",
  1926. "291/291 [==============================] - 0s 1ms/step - loss: 1.3037 - val_loss: 0.7738\n",
  1927. "Epoch 2/5\n",
  1928. "291/291 [==============================] - 0s 1ms/step - loss: 0.5818 - val_loss: 0.5106\n",
  1929. "Epoch 3/5\n",
  1930. "291/291 [==============================] - 0s 1ms/step - loss: 0.4501 - val_loss: 0.4415\n",
  1931. "Epoch 4/5\n",
  1932. "291/291 [==============================] - 0s 1ms/step - loss: 0.4195 - val_loss: 0.4227\n",
  1933. "Epoch 5/5\n",
  1934. "291/291 [==============================] - 0s 1ms/step - loss: 0.4094 - val_loss: 0.4200\n",
  1935. "73/73 [==============================] - 0s 742us/step - loss: 0.4179\n",
  1936. "Epoch 1/5\n",
  1937. "291/291 [==============================] - 0s 1ms/step - loss: 0.7948 - val_loss: 0.6184\n",
  1938. "Epoch 2/5\n",
  1939. "291/291 [==============================] - 0s 1ms/step - loss: 0.5231 - val_loss: 0.5104\n",
  1940. "Epoch 3/5\n",
  1941. "291/291 [==============================] - 0s 1ms/step - loss: 0.4645 - val_loss: 0.4827\n",
  1942. "Epoch 4/5\n",
  1943. "291/291 [==============================] - 0s 1ms/step - loss: 0.4427 - val_loss: 0.4622\n",
  1944. "Epoch 5/5\n",
  1945. "291/291 [==============================] - 0s 1ms/step - loss: 0.4295 - val_loss: 0.4479\n",
  1946. "73/73 [==============================] - 0s 797us/step - loss: 0.4482\n",
  1947. "Epoch 1/5\n",
  1948. "291/291 [==============================] - 0s 1ms/step - loss: 0.8815 - val_loss: 1.8018\n",
  1949. "Epoch 2/5\n",
  1950. "291/291 [==============================] - 0s 1ms/step - loss: 2.1401 - val_loss: 1.0293\n",
  1951. "Epoch 3/5\n",
  1952. "291/291 [==============================] - 0s 1ms/step - loss: 0.8023 - val_loss: 0.7424\n",
  1953. "Epoch 4/5\n",
  1954. "291/291 [==============================] - 0s 1ms/step - loss: 0.6288 - val_loss: 0.6142\n",
  1955. "Epoch 5/5\n",
  1956. "291/291 [==============================] - 0s 1ms/step - loss: 0.5315 - val_loss: 0.5409\n",
  1957. "73/73 [==============================] - 0s 770us/step - loss: 0.5320\n",
  1958. "Epoch 1/5\n",
  1959. "291/291 [==============================] - 0s 1ms/step - loss: 5.0118 - val_loss: 4.6170\n",
  1960. "Epoch 2/5\n",
  1961. "291/291 [==============================] - 0s 1ms/step - loss: 3.9815 - val_loss: 3.6953\n",
  1962. "Epoch 3/5\n",
  1963. "291/291 [==============================] - 0s 1ms/step - loss: 3.1890 - val_loss: 2.9776\n",
  1964. "Epoch 4/5\n",
  1965. "291/291 [==============================] - 0s 1ms/step - loss: 2.5746 - val_loss: 2.4242\n",
  1966. "Epoch 5/5\n",
  1967. "291/291 [==============================] - 0s 1ms/step - loss: 2.1076 - val_loss: 2.0075\n",
  1968. "73/73 [==============================] - 0s 707us/step - loss: 1.8704\n",
  1969. "Epoch 1/5\n",
  1970. "291/291 [==============================] - 0s 1ms/step - loss: 4.9963 - val_loss: 4.3942\n",
  1971. "Epoch 2/5\n",
  1972. "291/291 [==============================] - 0s 1ms/step - loss: 3.5796 - val_loss: 3.2556\n",
  1973. "Epoch 3/5\n",
  1974. "291/291 [==============================] - 0s 1ms/step - loss: 2.7069 - val_loss: 2.5413\n",
  1975. "Epoch 4/5\n",
  1976. "291/291 [==============================] - 0s 1ms/step - loss: 2.1482 - val_loss: 2.0784\n",
  1977. "Epoch 5/5\n",
  1978. "291/291 [==============================] - 0s 1ms/step - loss: 1.7759 - val_loss: 1.7648\n",
  1979. "73/73 [==============================] - 0s 748us/step - loss: 1.6183\n",
  1980. "Epoch 1/5\n",
  1981. "291/291 [==============================] - 0s 1ms/step - loss: 4.7519 - val_loss: 4.0855\n",
  1982. "Epoch 2/5\n",
  1983. "291/291 [==============================] - 0s 1ms/step - loss: 3.2999 - val_loss: 2.9238\n",
  1984. "Epoch 3/5\n",
  1985. "291/291 [==============================] - 1s 2ms/step - loss: 2.4370 - val_loss: 2.2299\n",
  1986. "Epoch 4/5\n",
  1987. "291/291 [==============================] - 0s 1ms/step - loss: 1.9157 - val_loss: 1.8096\n",
  1988. "Epoch 5/5\n",
  1989. "291/291 [==============================] - 0s 1ms/step - loss: 1.5931 - val_loss: 1.5461\n",
  1990. "73/73 [==============================] - 0s 748us/step - loss: 1.4312\n",
  1991. "Epoch 1/5\n",
  1992. "291/291 [==============================] - 0s 1ms/step - loss: 4.9929 - val_loss: 4.6813\n",
  1993. "Epoch 2/5\n",
  1994. "291/291 [==============================] - 0s 1ms/step - loss: 4.0927 - val_loss: 3.9001\n",
  1995. "Epoch 3/5\n",
  1996. "291/291 [==============================] - 0s 1ms/step - loss: 3.4243 - val_loss: 3.3209\n",
  1997. "Epoch 4/5\n",
  1998. "291/291 [==============================] - 0s 1ms/step - loss: 2.9245 - val_loss: 2.8990\n",
  1999. "Epoch 5/5\n",
  2000. "291/291 [==============================] - 0s 1ms/step - loss: 2.5587 - val_loss: 2.5942\n",
  2001. "73/73 [==============================] - 0s 747us/step - loss: 2.6023\n",
  2002. "Epoch 1/5\n",
  2003. "291/291 [==============================] - 0s 1ms/step - loss: 5.2935 - val_loss: 4.8570\n",
  2004. "Epoch 2/5\n",
  2005. "291/291 [==============================] - 0s 1ms/step - loss: 4.1148 - val_loss: 3.7904\n",
  2006. "Epoch 3/5\n",
  2007. "291/291 [==============================] - 0s 1ms/step - loss: 3.2019 - val_loss: 2.9567\n",
  2008. "Epoch 4/5\n",
  2009. "291/291 [==============================] - 0s 1ms/step - loss: 2.4980 - val_loss: 2.3245\n",
  2010. "Epoch 5/5\n",
  2011. "291/291 [==============================] - 0s 1ms/step - loss: 1.9810 - val_loss: 1.8730\n",
  2012. "73/73 [==============================] - 0s 739us/step - loss: 1.8853\n",
  2013. "Epoch 1/5\n",
  2014. "291/291 [==============================] - 0s 1ms/step - loss: 4.9591 - val_loss: 4.7765\n",
  2015. "Epoch 2/5\n",
  2016. "291/291 [==============================] - 0s 1ms/step - loss: 4.3151 - val_loss: 4.1725\n",
  2017. "Epoch 3/5\n",
  2018. "291/291 [==============================] - 0s 1ms/step - loss: 3.7881 - val_loss: 3.6747\n",
  2019. "Epoch 4/5\n",
  2020. "291/291 [==============================] - 0s 1ms/step - loss: 3.3531 - val_loss: 3.2628\n",
  2021. "Epoch 5/5\n",
  2022. "291/291 [==============================] - 0s 1ms/step - loss: 2.9913 - val_loss: 2.9188\n",
  2023. "73/73 [==============================] - 0s 733us/step - loss: 2.9332\n",
  2024. "Epoch 1/5\n",
  2025. "291/291 [==============================] - 0s 1ms/step - loss: 5.1775 - val_loss: 5.0870\n",
  2026. "Epoch 2/5\n",
  2027. "291/291 [==============================] - 0s 1ms/step - loss: 4.6242 - val_loss: 4.5608\n",
  2028. "Epoch 3/5\n",
  2029. "291/291 [==============================] - 0s 1ms/step - loss: 4.1376 - val_loss: 4.0952\n",
  2030. "Epoch 4/5\n",
  2031. "291/291 [==============================] - 0s 1ms/step - loss: 3.7064 - val_loss: 3.6831\n",
  2032. "Epoch 5/5\n",
  2033. "291/291 [==============================] - 0s 1ms/step - loss: 3.3243 - val_loss: 3.3188\n",
  2034. "73/73 [==============================] - 0s 779us/step - loss: 3.2203\n",
  2035. "Epoch 1/5\n",
  2036. "291/291 [==============================] - 0s 1ms/step - loss: 6.4158 - val_loss: 5.9867\n",
  2037. "Epoch 2/5\n",
  2038. "291/291 [==============================] - 0s 1ms/step - loss: 5.1875 - val_loss: 4.9300\n",
  2039. "Epoch 3/5\n",
  2040. "291/291 [==============================] - 0s 1ms/step - loss: 4.3307 - val_loss: 4.1768\n",
  2041. "Epoch 4/5\n",
  2042. "291/291 [==============================] - 0s 1ms/step - loss: 3.7101 - val_loss: 3.6235\n",
  2043. "Epoch 5/5\n",
  2044. "291/291 [==============================] - 0s 1ms/step - loss: 3.2461 - val_loss: 3.2031\n",
  2045. "73/73 [==============================] - 0s 775us/step - loss: 3.0986\n",
  2046. "Epoch 1/5\n",
  2047. "291/291 [==============================] - 0s 1ms/step - loss: 4.4729 - val_loss: 4.2768\n",
  2048. "Epoch 2/5\n",
  2049. "291/291 [==============================] - 0s 1ms/step - loss: 3.8722 - val_loss: 3.7205\n",
  2050. "Epoch 3/5\n",
  2051. "291/291 [==============================] - 0s 1ms/step - loss: 3.3785 - val_loss: 3.2665\n",
  2052. "Epoch 4/5\n",
  2053. "291/291 [==============================] - 0s 1ms/step - loss: 2.9731 - val_loss: 2.8924\n",
  2054. "Epoch 5/5\n",
  2055. "291/291 [==============================] - 0s 1ms/step - loss: 2.6386 - val_loss: 2.5875\n",
  2056. "73/73 [==============================] - 0s 740us/step - loss: 2.6656\n",
  2057. "Epoch 1/5\n",
  2058. "291/291 [==============================] - 0s 1ms/step - loss: 4.4344 - val_loss: 4.4528\n",
  2059. "Epoch 2/5\n",
  2060. "291/291 [==============================] - 0s 1ms/step - loss: 3.9696 - val_loss: 4.0036\n",
  2061. "Epoch 3/5\n",
  2062. "291/291 [==============================] - 0s 1ms/step - loss: 3.5653 - val_loss: 3.6109\n"
  2063. ]
  2064. },
  2065. {
  2066. "name": "stdout",
  2067. "output_type": "stream",
  2068. "text": [
  2069. "Epoch 4/5\n",
  2070. "291/291 [==============================] - 0s 1ms/step - loss: 3.2116 - val_loss: 3.2672\n",
  2071. "Epoch 5/5\n",
  2072. "291/291 [==============================] - 0s 1ms/step - loss: 2.9053 - val_loss: 2.9670\n",
  2073. "73/73 [==============================] - 0s 734us/step - loss: 2.7470\n",
  2074. "Epoch 1/5\n",
  2075. "291/291 [==============================] - 0s 1ms/step - loss: 2.3381 - val_loss: 1.1153\n",
  2076. "Epoch 2/5\n",
  2077. "291/291 [==============================] - 0s 1ms/step - loss: 0.8662 - val_loss: 0.8397\n",
  2078. "Epoch 3/5\n",
  2079. "291/291 [==============================] - 0s 1ms/step - loss: 0.7522 - val_loss: 0.7736\n",
  2080. "Epoch 4/5\n",
  2081. "291/291 [==============================] - 0s 1ms/step - loss: 0.7131 - val_loss: 0.7398\n",
  2082. "Epoch 5/5\n",
  2083. "291/291 [==============================] - 0s 1ms/step - loss: 0.6854 - val_loss: 0.7113\n",
  2084. "73/73 [==============================] - 0s 742us/step - loss: 0.6311\n",
  2085. "Epoch 1/5\n",
  2086. "291/291 [==============================] - 0s 1ms/step - loss: 2.2561 - val_loss: 1.0241\n",
  2087. "Epoch 2/5\n",
  2088. "291/291 [==============================] - 0s 1ms/step - loss: 0.8306 - val_loss: 0.8050\n",
  2089. "Epoch 3/5\n",
  2090. "291/291 [==============================] - 0s 1ms/step - loss: 0.7231 - val_loss: 0.7439\n",
  2091. "Epoch 4/5\n",
  2092. "291/291 [==============================] - 0s 1ms/step - loss: 0.6818 - val_loss: 0.7067\n",
  2093. "Epoch 5/5\n",
  2094. "291/291 [==============================] - 0s 1ms/step - loss: 0.6502 - val_loss: 0.6766\n",
  2095. "73/73 [==============================] - 0s 742us/step - loss: 0.6316\n",
  2096. "Epoch 1/5\n",
  2097. "291/291 [==============================] - 0s 1ms/step - loss: 2.4945 - val_loss: 1.4076\n",
  2098. "Epoch 2/5\n",
  2099. "291/291 [==============================] - 0s 1ms/step - loss: 0.9687 - val_loss: 0.8534\n",
  2100. "Epoch 3/5\n",
  2101. "291/291 [==============================] - 0s 1ms/step - loss: 0.7364 - val_loss: 0.7533\n",
  2102. "Epoch 4/5\n",
  2103. "291/291 [==============================] - 0s 1ms/step - loss: 0.6875 - val_loss: 0.7237\n",
  2104. "Epoch 5/5\n",
  2105. "291/291 [==============================] - 0s 1ms/step - loss: 0.6646 - val_loss: 0.7047\n",
  2106. "73/73 [==============================] - 0s 723us/step - loss: 0.6433\n",
  2107. "Epoch 1/5\n",
  2108. "291/291 [==============================] - 0s 1ms/step - loss: 2.1617 - val_loss: 1.3695\n",
  2109. "Epoch 2/5\n",
  2110. "291/291 [==============================] - 0s 1ms/step - loss: 1.0031 - val_loss: 0.9240\n",
  2111. "Epoch 3/5\n",
  2112. "291/291 [==============================] - 0s 1ms/step - loss: 0.7751 - val_loss: 0.7954\n",
  2113. "Epoch 4/5\n",
  2114. "291/291 [==============================] - 0s 1ms/step - loss: 0.7037 - val_loss: 0.7504\n",
  2115. "Epoch 5/5\n",
  2116. "291/291 [==============================] - 0s 1ms/step - loss: 0.6678 - val_loss: 0.7184\n",
  2117. "73/73 [==============================] - 0s 743us/step - loss: 0.6919\n",
  2118. "Epoch 1/5\n",
  2119. "291/291 [==============================] - 0s 1ms/step - loss: 2.3779 - val_loss: 1.0763\n",
  2120. "Epoch 2/5\n",
  2121. "291/291 [==============================] - 0s 1ms/step - loss: 0.8187 - val_loss: 0.7963\n",
  2122. "Epoch 3/5\n",
  2123. "291/291 [==============================] - 0s 1ms/step - loss: 0.6833 - val_loss: 0.7262\n",
  2124. "Epoch 4/5\n",
  2125. "291/291 [==============================] - 0s 1ms/step - loss: 0.6367 - val_loss: 0.6852\n",
  2126. "Epoch 5/5\n",
  2127. "291/291 [==============================] - 0s 1ms/step - loss: 0.6085 - val_loss: 0.6579\n",
  2128. "73/73 [==============================] - 0s 725us/step - loss: 0.6309\n",
  2129. "Epoch 1/5\n",
  2130. "291/291 [==============================] - 1s 2ms/step - loss: 1.0614 - val_loss: 0.6549\n",
  2131. "Epoch 2/5\n",
  2132. "291/291 [==============================] - 0s 1ms/step - loss: 0.5763 - val_loss: 0.5728\n",
  2133. "Epoch 3/5\n",
  2134. "291/291 [==============================] - 0s 1ms/step - loss: 0.5164 - val_loss: 0.5159\n",
  2135. "Epoch 4/5\n",
  2136. "291/291 [==============================] - 0s 1ms/step - loss: 0.4774 - val_loss: 0.4805\n",
  2137. "Epoch 5/5\n",
  2138. "291/291 [==============================] - 0s 1ms/step - loss: 0.4504 - val_loss: 0.4548\n",
  2139. "73/73 [==============================] - 0s 711us/step - loss: 0.4067\n",
  2140. "Epoch 1/5\n",
  2141. "291/291 [==============================] - 0s 1ms/step - loss: 1.0348 - val_loss: 0.6440\n",
  2142. "Epoch 2/5\n",
  2143. "291/291 [==============================] - 0s 1ms/step - loss: 0.5472 - val_loss: 0.5333\n",
  2144. "Epoch 3/5\n",
  2145. "291/291 [==============================] - 0s 1ms/step - loss: 0.4788 - val_loss: 0.4913\n",
  2146. "Epoch 4/5\n",
  2147. "291/291 [==============================] - 0s 1ms/step - loss: 0.4522 - val_loss: 0.4811\n",
  2148. "Epoch 5/5\n",
  2149. "291/291 [==============================] - 0s 1ms/step - loss: 0.4368 - val_loss: 0.4566\n",
  2150. "73/73 [==============================] - 0s 742us/step - loss: 0.4366\n",
  2151. "Epoch 1/5\n",
  2152. "291/291 [==============================] - 0s 1ms/step - loss: 1.1848 - val_loss: 0.6867\n",
  2153. "Epoch 2/5\n",
  2154. "291/291 [==============================] - 0s 1ms/step - loss: 0.6337 - val_loss: 0.5823\n",
  2155. "Epoch 3/5\n",
  2156. "291/291 [==============================] - 0s 1ms/step - loss: 0.5195 - val_loss: 0.5370\n",
  2157. "Epoch 4/5\n",
  2158. "291/291 [==============================] - 0s 1ms/step - loss: 0.4860 - val_loss: 0.5067\n",
  2159. "Epoch 5/5\n",
  2160. "291/291 [==============================] - 0s 1ms/step - loss: 0.4633 - val_loss: 0.4842\n",
  2161. "73/73 [==============================] - 0s 769us/step - loss: 0.4617\n",
  2162. "Epoch 1/5\n",
  2163. "291/291 [==============================] - 0s 1ms/step - loss: 1.1639 - val_loss: 0.7197\n",
  2164. "Epoch 2/5\n",
  2165. "291/291 [==============================] - 0s 1ms/step - loss: 0.6331 - val_loss: 0.6613\n",
  2166. "Epoch 3/5\n",
  2167. "291/291 [==============================] - 0s 1ms/step - loss: 0.6715 - val_loss: 0.5386\n",
  2168. "Epoch 4/5\n",
  2169. "291/291 [==============================] - 0s 1ms/step - loss: 0.4791 - val_loss: 0.5033\n",
  2170. "Epoch 5/5\n",
  2171. "291/291 [==============================] - 0s 1ms/step - loss: 0.4572 - val_loss: 0.4787\n",
  2172. "73/73 [==============================] - 0s 785us/step - loss: 0.4762\n",
  2173. "Epoch 1/5\n",
  2174. "291/291 [==============================] - 0s 1ms/step - loss: 1.0270 - val_loss: 0.6476\n",
  2175. "Epoch 2/5\n",
  2176. "291/291 [==============================] - 0s 1ms/step - loss: 0.5398 - val_loss: 0.5397\n",
  2177. "Epoch 3/5\n",
  2178. "291/291 [==============================] - 0s 1ms/step - loss: 0.4812 - val_loss: 0.4949\n",
  2179. "Epoch 4/5\n",
  2180. "291/291 [==============================] - 0s 1ms/step - loss: 0.4523 - val_loss: 0.4699\n",
  2181. "Epoch 5/5\n",
  2182. "291/291 [==============================] - 0s 1ms/step - loss: 0.4380 - val_loss: 0.4514\n",
  2183. "73/73 [==============================] - 0s 756us/step - loss: 0.4447\n",
  2184. "Epoch 1/5\n",
  2185. "291/291 [==============================] - 0s 1ms/step - loss: 0.7495 - val_loss: 0.6997\n",
  2186. "Epoch 2/5\n",
  2187. "291/291 [==============================] - 0s 1ms/step - loss: 0.5457 - val_loss: 0.5050\n",
  2188. "Epoch 3/5\n",
  2189. "291/291 [==============================] - 0s 1ms/step - loss: 0.4546 - val_loss: 0.4584\n",
  2190. "Epoch 4/5\n",
  2191. "291/291 [==============================] - 0s 1ms/step - loss: 0.4297 - val_loss: 0.4342\n",
  2192. "Epoch 5/5\n",
  2193. "291/291 [==============================] - 0s 1ms/step - loss: 0.4084 - val_loss: 0.4056\n",
  2194. "73/73 [==============================] - 0s 755us/step - loss: 0.3613\n",
  2195. "Epoch 1/5\n",
  2196. "291/291 [==============================] - 0s 1ms/step - loss: 0.9667 - val_loss: 0.5910\n",
  2197. "Epoch 2/5\n",
  2198. "291/291 [==============================] - 0s 1ms/step - loss: 0.5323 - val_loss: 0.5016\n",
  2199. "Epoch 3/5\n",
  2200. "291/291 [==============================] - 0s 1ms/step - loss: 0.4581 - val_loss: 0.4575\n",
  2201. "Epoch 4/5\n",
  2202. "291/291 [==============================] - 0s 1ms/step - loss: 0.4247 - val_loss: 0.4287\n",
  2203. "Epoch 5/5\n",
  2204. "291/291 [==============================] - 0s 1ms/step - loss: 0.4072 - val_loss: 0.4158\n",
  2205. "73/73 [==============================] - 0s 777us/step - loss: 0.4019\n",
  2206. "Epoch 1/5\n",
  2207. "291/291 [==============================] - 0s 1ms/step - loss: 0.9655 - val_loss: 0.5448\n",
  2208. "Epoch 2/5\n",
  2209. "291/291 [==============================] - 0s 1ms/step - loss: 0.4861 - val_loss: 0.4841\n",
  2210. "Epoch 3/5\n",
  2211. "291/291 [==============================] - 0s 1ms/step - loss: 0.4452 - val_loss: 0.4581\n",
  2212. "Epoch 4/5\n",
  2213. "291/291 [==============================] - 0s 1ms/step - loss: 0.4193 - val_loss: 0.4270\n",
  2214. "Epoch 5/5\n",
  2215. "291/291 [==============================] - 0s 1ms/step - loss: 0.4039 - val_loss: 0.4250\n",
  2216. "73/73 [==============================] - 0s 784us/step - loss: 0.4071\n",
  2217. "Epoch 1/5\n",
  2218. "291/291 [==============================] - 0s 1ms/step - loss: 0.8438 - val_loss: 0.6067\n",
  2219. "Epoch 2/5\n",
  2220. "291/291 [==============================] - 0s 1ms/step - loss: 0.6505 - val_loss: 0.6540\n",
  2221. "Epoch 3/5\n",
  2222. "291/291 [==============================] - 0s 1ms/step - loss: 0.6769 - val_loss: 0.5327\n",
  2223. "Epoch 4/5\n",
  2224. "291/291 [==============================] - 0s 1ms/step - loss: 0.4426 - val_loss: 0.4589\n",
  2225. "Epoch 5/5\n",
  2226. "291/291 [==============================] - 0s 1ms/step - loss: 0.4146 - val_loss: 0.4284\n",
  2227. "73/73 [==============================] - 0s 761us/step - loss: 0.4282\n",
  2228. "Epoch 1/5\n"
  2229. ]
  2230. },
  2231. {
  2232. "name": "stdout",
  2233. "output_type": "stream",
  2234. "text": [
  2235. "291/291 [==============================] - 0s 1ms/step - loss: 0.7299 - val_loss: 0.5967\n",
  2236. "Epoch 2/5\n",
  2237. "291/291 [==============================] - 0s 1ms/step - loss: 0.4993 - val_loss: 0.5064\n",
  2238. "Epoch 3/5\n",
  2239. "291/291 [==============================] - 0s 1ms/step - loss: 0.4567 - val_loss: 0.4636\n",
  2240. "Epoch 4/5\n",
  2241. "291/291 [==============================] - 0s 1ms/step - loss: 0.4197 - val_loss: 0.4369\n",
  2242. "Epoch 5/5\n",
  2243. "291/291 [==============================] - 0s 1ms/step - loss: 0.4032 - val_loss: 0.4196\n",
  2244. "73/73 [==============================] - 0s 728us/step - loss: 0.4158\n",
  2245. "Epoch 1/5\n",
  2246. "291/291 [==============================] - 0s 1ms/step - loss: 5.6228 - val_loss: 5.0109\n",
  2247. "Epoch 2/5\n",
  2248. "291/291 [==============================] - 0s 1ms/step - loss: 4.2094 - val_loss: 3.8786\n",
  2249. "Epoch 3/5\n",
  2250. "291/291 [==============================] - 0s 1ms/step - loss: 3.2886 - val_loss: 3.1267\n",
  2251. "Epoch 4/5\n",
  2252. "291/291 [==============================] - 0s 1ms/step - loss: 2.6749 - val_loss: 2.6246\n",
  2253. "Epoch 5/5\n",
  2254. "291/291 [==============================] - 0s 1ms/step - loss: 2.2651 - val_loss: 2.2845\n",
  2255. "73/73 [==============================] - 0s 748us/step - loss: 2.0913\n",
  2256. "Epoch 1/5\n",
  2257. "291/291 [==============================] - 0s 1ms/step - loss: 5.2815 - val_loss: 4.8232\n",
  2258. "Epoch 2/5\n",
  2259. "291/291 [==============================] - 0s 1ms/step - loss: 3.9833 - val_loss: 3.7745\n",
  2260. "Epoch 3/5\n",
  2261. "291/291 [==============================] - 0s 1ms/step - loss: 3.1174 - val_loss: 3.0497\n",
  2262. "Epoch 4/5\n",
  2263. "291/291 [==============================] - 0s 1ms/step - loss: 2.5106 - val_loss: 2.5315\n",
  2264. "Epoch 5/5\n",
  2265. "291/291 [==============================] - 0s 1ms/step - loss: 2.0805 - val_loss: 2.1548\n",
  2266. "73/73 [==============================] - 0s 790us/step - loss: 1.8916\n",
  2267. "Epoch 1/5\n",
  2268. "291/291 [==============================] - 0s 1ms/step - loss: 3.9459 - val_loss: 3.4256\n",
  2269. "Epoch 2/5\n",
  2270. "291/291 [==============================] - 0s 1ms/step - loss: 2.7096 - val_loss: 2.4190\n",
  2271. "Epoch 3/5\n",
  2272. "291/291 [==============================] - 0s 1ms/step - loss: 1.9359 - val_loss: 1.7957\n",
  2273. "Epoch 4/5\n",
  2274. "291/291 [==============================] - 0s 1ms/step - loss: 1.4690 - val_loss: 1.4218\n",
  2275. "Epoch 5/5\n",
  2276. "291/291 [==============================] - 0s 1ms/step - loss: 1.1965 - val_loss: 1.1990\n",
  2277. "73/73 [==============================] - 0s 671us/step - loss: 1.0999\n",
  2278. "Epoch 1/5\n",
  2279. "291/291 [==============================] - 0s 1ms/step - loss: 4.3947 - val_loss: 3.7929\n",
  2280. "Epoch 2/5\n",
  2281. "291/291 [==============================] - 0s 1ms/step - loss: 3.1948 - val_loss: 2.7643\n",
  2282. "Epoch 3/5\n",
  2283. "291/291 [==============================] - 0s 1ms/step - loss: 2.3949 - val_loss: 2.1030\n",
  2284. "Epoch 4/5\n",
  2285. "291/291 [==============================] - 0s 1ms/step - loss: 1.8832 - val_loss: 1.6969\n",
  2286. "Epoch 5/5\n",
  2287. "291/291 [==============================] - 0s 1ms/step - loss: 1.5645 - val_loss: 1.4547\n",
  2288. "73/73 [==============================] - 0s 741us/step - loss: 1.6620\n",
  2289. "Epoch 1/5\n",
  2290. "291/291 [==============================] - 0s 1ms/step - loss: 4.3166 - val_loss: 3.7630\n",
  2291. "Epoch 2/5\n",
  2292. "291/291 [==============================] - 0s 1ms/step - loss: 3.1182 - val_loss: 2.7475\n",
  2293. "Epoch 3/5\n",
  2294. "291/291 [==============================] - 0s 1ms/step - loss: 2.3206 - val_loss: 2.0768\n",
  2295. "Epoch 4/5\n",
  2296. "291/291 [==============================] - 0s 1ms/step - loss: 1.8004 - val_loss: 1.6480\n",
  2297. "Epoch 5/5\n",
  2298. "291/291 [==============================] - 0s 1ms/step - loss: 1.4666 - val_loss: 1.3745\n",
  2299. "73/73 [==============================] - 0s 777us/step - loss: 1.3071\n",
  2300. "Epoch 1/5\n",
  2301. "291/291 [==============================] - 0s 1ms/step - loss: 6.1250 - val_loss: 5.8585\n",
  2302. "Epoch 2/5\n",
  2303. "291/291 [==============================] - 0s 1ms/step - loss: 5.2140 - val_loss: 5.0181\n",
  2304. "Epoch 3/5\n",
  2305. "291/291 [==============================] - 0s 1ms/step - loss: 4.4914 - val_loss: 4.3381\n",
  2306. "Epoch 4/5\n",
  2307. "291/291 [==============================] - 0s 1ms/step - loss: 3.9010 - val_loss: 3.7762\n",
  2308. "Epoch 5/5\n",
  2309. "291/291 [==============================] - 0s 1ms/step - loss: 3.4132 - val_loss: 3.3111\n",
  2310. "73/73 [==============================] - 0s 715us/step - loss: 3.0906\n",
  2311. "Epoch 1/5\n",
  2312. "291/291 [==============================] - 0s 1ms/step - loss: 5.1080 - val_loss: 4.9837\n",
  2313. "Epoch 2/5\n",
  2314. "291/291 [==============================] - 0s 1ms/step - loss: 4.4769 - val_loss: 4.4024\n",
  2315. "Epoch 3/5\n",
  2316. "291/291 [==============================] - 0s 1ms/step - loss: 3.9592 - val_loss: 3.9200\n",
  2317. "Epoch 4/5\n",
  2318. "291/291 [==============================] - 0s 1ms/step - loss: 3.5278 - val_loss: 3.5171\n",
  2319. "Epoch 5/5\n",
  2320. "291/291 [==============================] - 0s 1ms/step - loss: 3.1632 - val_loss: 3.1757\n",
  2321. "73/73 [==============================] - 0s 842us/step - loss: 2.9912\n",
  2322. "Epoch 1/5\n",
  2323. "291/291 [==============================] - 0s 1ms/step - loss: 5.4720 - val_loss: 5.3090\n",
  2324. "Epoch 2/5\n",
  2325. "291/291 [==============================] - 0s 1ms/step - loss: 4.6994 - val_loss: 4.6036\n",
  2326. "Epoch 3/5\n",
  2327. "291/291 [==============================] - 0s 1ms/step - loss: 4.0962 - val_loss: 4.0425\n",
  2328. "Epoch 4/5\n",
  2329. "291/291 [==============================] - 0s 1ms/step - loss: 3.6100 - val_loss: 3.5849\n",
  2330. "Epoch 5/5\n",
  2331. "291/291 [==============================] - 0s 1ms/step - loss: 3.2125 - val_loss: 3.2082\n",
  2332. "73/73 [==============================] - 0s 753us/step - loss: 3.0407\n",
  2333. "Epoch 1/5\n",
  2334. "291/291 [==============================] - 0s 1ms/step - loss: 4.4942 - val_loss: 4.3314\n",
  2335. "Epoch 2/5\n",
  2336. "291/291 [==============================] - 0s 1ms/step - loss: 3.7624 - val_loss: 3.7059\n",
  2337. "Epoch 3/5\n",
  2338. "291/291 [==============================] - 0s 1ms/step - loss: 3.1912 - val_loss: 3.2194\n",
  2339. "Epoch 4/5\n",
  2340. "291/291 [==============================] - 0s 1ms/step - loss: 2.7433 - val_loss: 2.8403\n",
  2341. "Epoch 5/5\n",
  2342. "291/291 [==============================] - 0s 1ms/step - loss: 2.3943 - val_loss: 2.5434\n",
  2343. "73/73 [==============================] - 0s 754us/step - loss: 2.7792\n",
  2344. "Epoch 1/5\n",
  2345. "291/291 [==============================] - 0s 1ms/step - loss: 4.9944 - val_loss: 4.9636\n",
  2346. "Epoch 2/5\n",
  2347. "291/291 [==============================] - 0s 1ms/step - loss: 4.4056 - val_loss: 4.3921\n",
  2348. "Epoch 3/5\n",
  2349. "291/291 [==============================] - 0s 1ms/step - loss: 3.9035 - val_loss: 3.8990\n",
  2350. "Epoch 4/5\n",
  2351. "291/291 [==============================] - 0s 1ms/step - loss: 3.4698 - val_loss: 3.4700\n",
  2352. "Epoch 5/5\n",
  2353. "291/291 [==============================] - 0s 1ms/step - loss: 3.0928 - val_loss: 3.0957\n",
  2354. "73/73 [==============================] - 0s 765us/step - loss: 3.0252\n",
  2355. "Epoch 1/5\n",
  2356. "291/291 [==============================] - 0s 1ms/step - loss: 2.0023 - val_loss: 0.9533\n",
  2357. "Epoch 2/5\n",
  2358. "291/291 [==============================] - 0s 1ms/step - loss: 0.8033 - val_loss: 0.7938\n",
  2359. "Epoch 3/5\n",
  2360. "291/291 [==============================] - 0s 1ms/step - loss: 0.7195 - val_loss: 0.7491\n",
  2361. "Epoch 4/5\n",
  2362. "291/291 [==============================] - 0s 1ms/step - loss: 0.6856 - val_loss: 0.7212\n",
  2363. "Epoch 5/5\n",
  2364. "291/291 [==============================] - 0s 1ms/step - loss: 0.6615 - val_loss: 0.6965\n",
  2365. "73/73 [==============================] - 0s 744us/step - loss: 0.6045\n",
  2366. "Epoch 1/5\n",
  2367. "291/291 [==============================] - 0s 1ms/step - loss: 2.2450 - val_loss: 0.9491\n",
  2368. "Epoch 2/5\n",
  2369. "291/291 [==============================] - 0s 1ms/step - loss: 0.7618 - val_loss: 0.7292\n",
  2370. "Epoch 3/5\n",
  2371. "291/291 [==============================] - 0s 1ms/step - loss: 0.6564 - val_loss: 0.6806\n",
  2372. "Epoch 4/5\n",
  2373. "291/291 [==============================] - 0s 1ms/step - loss: 0.6204 - val_loss: 0.6501\n",
  2374. "Epoch 5/5\n",
  2375. "291/291 [==============================] - 0s 1ms/step - loss: 0.5950 - val_loss: 0.6274\n",
  2376. "73/73 [==============================] - 0s 726us/step - loss: 0.5726\n",
  2377. "Epoch 1/5\n",
  2378. "291/291 [==============================] - 0s 1ms/step - loss: 2.2090 - val_loss: 1.0413\n",
  2379. "Epoch 2/5\n",
  2380. "291/291 [==============================] - 0s 1ms/step - loss: 0.8362 - val_loss: 0.7249\n",
  2381. "Epoch 3/5\n",
  2382. "291/291 [==============================] - 0s 1ms/step - loss: 0.6537 - val_loss: 0.6801\n",
  2383. "Epoch 4/5\n",
  2384. "291/291 [==============================] - 0s 1ms/step - loss: 0.6144 - val_loss: 0.6540\n",
  2385. "Epoch 5/5\n",
  2386. "291/291 [==============================] - 0s 1ms/step - loss: 0.5890 - val_loss: 0.6287\n",
  2387. "73/73 [==============================] - 0s 775us/step - loss: 0.5801\n",
  2388. "Epoch 1/5\n",
  2389. "291/291 [==============================] - 0s 1ms/step - loss: 2.3889 - val_loss: 1.1377\n",
  2390. "Epoch 2/5\n",
  2391. "291/291 [==============================] - 0s 1ms/step - loss: 0.8420 - val_loss: 0.7974\n",
  2392. "Epoch 3/5\n",
  2393. "291/291 [==============================] - 0s 1ms/step - loss: 0.7043 - val_loss: 0.7268\n"
  2394. ]
  2395. },
  2396. {
  2397. "name": "stdout",
  2398. "output_type": "stream",
  2399. "text": [
  2400. "Epoch 4/5\n",
  2401. "291/291 [==============================] - 0s 1ms/step - loss: 0.6557 - val_loss: 0.6920\n",
  2402. "Epoch 5/5\n",
  2403. "291/291 [==============================] - 0s 1ms/step - loss: 0.6232 - val_loss: 0.6650\n",
  2404. "73/73 [==============================] - 0s 738us/step - loss: 0.6350\n",
  2405. "Epoch 1/5\n",
  2406. "291/291 [==============================] - 0s 1ms/step - loss: 1.9025 - val_loss: 1.1555\n",
  2407. "Epoch 2/5\n",
  2408. "291/291 [==============================] - 0s 1ms/step - loss: 0.8186 - val_loss: 0.8194\n",
  2409. "Epoch 3/5\n",
  2410. "291/291 [==============================] - 0s 1ms/step - loss: 0.6931 - val_loss: 0.7417\n",
  2411. "Epoch 4/5\n",
  2412. "291/291 [==============================] - 0s 1ms/step - loss: 0.6443 - val_loss: 0.6950\n",
  2413. "Epoch 5/5\n",
  2414. "291/291 [==============================] - 0s 1ms/step - loss: 0.6123 - val_loss: 0.6622\n",
  2415. "73/73 [==============================] - 0s 759us/step - loss: 0.6224\n",
  2416. "Epoch 1/5\n",
  2417. "291/291 [==============================] - 0s 1ms/step - loss: 0.9533 - val_loss: 0.7139\n",
  2418. "Epoch 2/5\n",
  2419. "291/291 [==============================] - 0s 1ms/step - loss: 0.7184 - val_loss: 0.6025\n",
  2420. "Epoch 3/5\n",
  2421. "291/291 [==============================] - 0s 1ms/step - loss: 0.5380 - val_loss: 0.5395\n",
  2422. "Epoch 4/5\n",
  2423. "291/291 [==============================] - 0s 1ms/step - loss: 0.4826 - val_loss: 0.4921\n",
  2424. "Epoch 5/5\n",
  2425. "291/291 [==============================] - 0s 1ms/step - loss: 0.4485 - val_loss: 0.4574\n",
  2426. "73/73 [==============================] - 0s 734us/step - loss: 0.3998\n",
  2427. "Epoch 1/5\n",
  2428. "291/291 [==============================] - 0s 1ms/step - loss: 1.2287 - val_loss: 0.6932\n",
  2429. "Epoch 2/5\n",
  2430. "291/291 [==============================] - 0s 1ms/step - loss: 0.6001 - val_loss: 0.6137\n",
  2431. "Epoch 3/5\n",
  2432. "291/291 [==============================] - 0s 1ms/step - loss: 0.5416 - val_loss: 0.5587\n",
  2433. "Epoch 4/5\n",
  2434. "291/291 [==============================] - 0s 1ms/step - loss: 0.4960 - val_loss: 0.5179\n",
  2435. "Epoch 5/5\n",
  2436. "291/291 [==============================] - 0s 1ms/step - loss: 0.4686 - val_loss: 0.4812\n",
  2437. "73/73 [==============================] - 0s 935us/step - loss: 0.4532\n",
  2438. "Epoch 1/5\n",
  2439. "291/291 [==============================] - 0s 1ms/step - loss: 0.9267 - val_loss: 0.6546\n",
  2440. "Epoch 2/5\n",
  2441. "291/291 [==============================] - 0s 1ms/step - loss: 0.5586 - val_loss: 0.5487\n",
  2442. "Epoch 3/5\n",
  2443. "291/291 [==============================] - 0s 1ms/step - loss: 0.4945 - val_loss: 0.5146\n",
  2444. "Epoch 4/5\n",
  2445. "291/291 [==============================] - 1s 2ms/step - loss: 0.4687 - val_loss: 0.4833\n",
  2446. "Epoch 5/5\n",
  2447. "291/291 [==============================] - 0s 1ms/step - loss: 0.4487 - val_loss: 0.4622\n",
  2448. "73/73 [==============================] - 0s 754us/step - loss: 0.4462\n",
  2449. "Epoch 1/5\n",
  2450. "291/291 [==============================] - 0s 1ms/step - loss: 0.8807 - val_loss: 0.6649\n",
  2451. "Epoch 2/5\n",
  2452. "291/291 [==============================] - 0s 1ms/step - loss: 0.5498 - val_loss: 0.5828\n",
  2453. "Epoch 3/5\n",
  2454. "291/291 [==============================] - 0s 1ms/step - loss: 0.5006 - val_loss: 0.5351\n",
  2455. "Epoch 4/5\n",
  2456. "291/291 [==============================] - 0s 1ms/step - loss: 0.4734 - val_loss: 0.5161\n",
  2457. "Epoch 5/5\n",
  2458. "291/291 [==============================] - 0s 1ms/step - loss: 0.4523 - val_loss: 0.4945\n",
  2459. "73/73 [==============================] - 0s 683us/step - loss: 0.4653\n",
  2460. "Epoch 1/5\n",
  2461. "291/291 [==============================] - 0s 1ms/step - loss: 1.1786 - val_loss: 0.6675\n",
  2462. "Epoch 2/5\n",
  2463. "291/291 [==============================] - 0s 1ms/step - loss: 0.5539 - val_loss: 0.5520\n",
  2464. "Epoch 3/5\n",
  2465. "291/291 [==============================] - 0s 1ms/step - loss: 0.4869 - val_loss: 0.5067\n",
  2466. "Epoch 4/5\n",
  2467. "291/291 [==============================] - 0s 1ms/step - loss: 0.4525 - val_loss: 0.4721\n",
  2468. "Epoch 5/5\n",
  2469. "291/291 [==============================] - 0s 1ms/step - loss: 0.4284 - val_loss: 0.4444\n",
  2470. "73/73 [==============================] - 0s 766us/step - loss: 0.4469\n",
  2471. "Epoch 1/5\n",
  2472. "291/291 [==============================] - 0s 1ms/step - loss: 0.7991 - val_loss: 1.7612\n",
  2473. "Epoch 2/5\n",
  2474. "291/291 [==============================] - 0s 1ms/step - loss: 0.5986 - val_loss: 0.6048\n",
  2475. "Epoch 3/5\n",
  2476. "291/291 [==============================] - 0s 1ms/step - loss: 0.5762 - val_loss: 0.4676\n",
  2477. "Epoch 4/5\n",
  2478. "291/291 [==============================] - 0s 1ms/step - loss: 0.4401 - val_loss: 0.4329\n",
  2479. "Epoch 5/5\n",
  2480. "291/291 [==============================] - 0s 1ms/step - loss: 0.4164 - val_loss: 0.4159\n",
  2481. "73/73 [==============================] - 0s 755us/step - loss: 0.4005\n",
  2482. "Epoch 1/5\n",
  2483. "291/291 [==============================] - 0s 1ms/step - loss: 0.9572 - val_loss: 0.6219\n",
  2484. "Epoch 2/5\n",
  2485. "291/291 [==============================] - 0s 1ms/step - loss: 0.5170 - val_loss: 0.5151\n",
  2486. "Epoch 3/5\n",
  2487. "291/291 [==============================] - 0s 1ms/step - loss: 0.4572 - val_loss: 0.4713\n",
  2488. "Epoch 4/5\n",
  2489. "291/291 [==============================] - 0s 1ms/step - loss: 0.4272 - val_loss: 0.4421\n",
  2490. "Epoch 5/5\n",
  2491. "291/291 [==============================] - 0s 1ms/step - loss: 0.4037 - val_loss: 0.4118\n",
  2492. "73/73 [==============================] - 0s 758us/step - loss: 0.3930\n",
  2493. "Epoch 1/5\n",
  2494. "291/291 [==============================] - 0s 1ms/step - loss: 1.3686 - val_loss: 0.8028\n",
  2495. "Epoch 2/5\n",
  2496. "291/291 [==============================] - 0s 1ms/step - loss: 0.9956 - val_loss: 0.4716\n",
  2497. "Epoch 3/5\n",
  2498. "291/291 [==============================] - 0s 1ms/step - loss: 0.4191 - val_loss: 0.4250\n",
  2499. "Epoch 4/5\n",
  2500. "291/291 [==============================] - 0s 1ms/step - loss: 0.4006 - val_loss: 0.4051\n",
  2501. "Epoch 5/5\n",
  2502. "291/291 [==============================] - 0s 1ms/step - loss: 0.3874 - val_loss: 0.4005\n",
  2503. "73/73 [==============================] - 0s 770us/step - loss: 0.3902\n",
  2504. "Epoch 1/5\n",
  2505. "291/291 [==============================] - 0s 1ms/step - loss: 0.7477 - val_loss: 0.5826\n",
  2506. "Epoch 2/5\n",
  2507. "291/291 [==============================] - 0s 1ms/step - loss: 0.4865 - val_loss: 0.5033\n",
  2508. "Epoch 3/5\n",
  2509. "291/291 [==============================] - 0s 1ms/step - loss: 0.4655 - val_loss: 0.6109\n",
  2510. "Epoch 4/5\n",
  2511. "291/291 [==============================] - 0s 1ms/step - loss: 0.5462 - val_loss: 0.4520\n",
  2512. "Epoch 5/5\n",
  2513. "291/291 [==============================] - 0s 1ms/step - loss: 0.4133 - val_loss: 0.4261\n",
  2514. "73/73 [==============================] - 0s 749us/step - loss: 0.4355\n",
  2515. "Epoch 1/5\n",
  2516. "291/291 [==============================] - 0s 1ms/step - loss: 1.4955 - val_loss: 1.1431\n",
  2517. "Epoch 2/5\n",
  2518. "291/291 [==============================] - 0s 1ms/step - loss: 1.2784 - val_loss: 0.4875\n",
  2519. "Epoch 3/5\n",
  2520. "291/291 [==============================] - 0s 1ms/step - loss: 0.4232 - val_loss: 0.4448\n",
  2521. "Epoch 4/5\n",
  2522. "291/291 [==============================] - 0s 1ms/step - loss: 0.3824 - val_loss: 0.4004\n",
  2523. "Epoch 5/5\n",
  2524. "291/291 [==============================] - 0s 1ms/step - loss: 0.3608 - val_loss: 0.3813\n",
  2525. "73/73 [==============================] - 0s 738us/step - loss: 0.3868\n",
  2526. "Epoch 1/5\n",
  2527. "291/291 [==============================] - 0s 1ms/step - loss: 5.9880 - val_loss: 5.7623\n",
  2528. "Epoch 2/5\n",
  2529. "291/291 [==============================] - 0s 1ms/step - loss: 5.2137 - val_loss: 5.1346\n",
  2530. "Epoch 3/5\n",
  2531. "291/291 [==============================] - 0s 1ms/step - loss: 4.6754 - val_loss: 4.6406\n",
  2532. "Epoch 4/5\n",
  2533. "291/291 [==============================] - 0s 1ms/step - loss: 4.2372 - val_loss: 4.2291\n",
  2534. "Epoch 5/5\n",
  2535. "291/291 [==============================] - 0s 1ms/step - loss: 3.8678 - val_loss: 3.8781\n",
  2536. "73/73 [==============================] - 0s 732us/step - loss: 3.5569\n",
  2537. "Epoch 1/5\n",
  2538. "291/291 [==============================] - 0s 1ms/step - loss: 4.7701 - val_loss: 4.5420\n",
  2539. "Epoch 2/5\n",
  2540. "291/291 [==============================] - 0s 1ms/step - loss: 4.0600 - val_loss: 3.8634\n",
  2541. "Epoch 3/5\n",
  2542. "291/291 [==============================] - 0s 1ms/step - loss: 3.4736 - val_loss: 3.3011\n",
  2543. "Epoch 4/5\n",
  2544. "291/291 [==============================] - 0s 1ms/step - loss: 3.0006 - val_loss: 2.8689\n",
  2545. "Epoch 5/5\n",
  2546. "291/291 [==============================] - 0s 1ms/step - loss: 2.6321 - val_loss: 2.5144\n",
  2547. "73/73 [==============================] - 0s 741us/step - loss: 2.4666\n",
  2548. "Epoch 1/5\n",
  2549. "291/291 [==============================] - 0s 1ms/step - loss: 6.2465 - val_loss: 5.9153\n",
  2550. "Epoch 2/5\n",
  2551. "291/291 [==============================] - 0s 1ms/step - loss: 5.2110 - val_loss: 5.0936\n",
  2552. "Epoch 3/5\n",
  2553. "291/291 [==============================] - 0s 1ms/step - loss: 4.5468 - val_loss: 4.5148\n",
  2554. "Epoch 4/5\n",
  2555. "291/291 [==============================] - 0s 1ms/step - loss: 4.0572 - val_loss: 4.0692\n",
  2556. "Epoch 5/5\n",
  2557. "291/291 [==============================] - 0s 1ms/step - loss: 3.6687 - val_loss: 3.7055\n",
  2558. "73/73 [==============================] - 0s 841us/step - loss: 3.5474\n",
  2559. "Epoch 1/5\n"
  2560. ]
  2561. },
  2562. {
  2563. "name": "stdout",
  2564. "output_type": "stream",
  2565. "text": [
  2566. "291/291 [==============================] - 0s 1ms/step - loss: 3.9042 - val_loss: 3.6606\n",
  2567. "Epoch 2/5\n",
  2568. "291/291 [==============================] - 0s 1ms/step - loss: 3.2033 - val_loss: 3.0084\n",
  2569. "Epoch 3/5\n",
  2570. "291/291 [==============================] - 0s 1ms/step - loss: 2.6208 - val_loss: 2.4856\n",
  2571. "Epoch 4/5\n",
  2572. "291/291 [==============================] - 0s 1ms/step - loss: 2.1696 - val_loss: 2.0959\n",
  2573. "Epoch 5/5\n",
  2574. "291/291 [==============================] - 0s 1ms/step - loss: 1.8462 - val_loss: 1.8225\n",
  2575. "73/73 [==============================] - 0s 768us/step - loss: 1.8580\n",
  2576. "Epoch 1/5\n",
  2577. "291/291 [==============================] - 0s 1ms/step - loss: 5.1809 - val_loss: 5.2159\n",
  2578. "Epoch 2/5\n",
  2579. "291/291 [==============================] - 0s 1ms/step - loss: 4.7321 - val_loss: 4.7815\n",
  2580. "Epoch 3/5\n",
  2581. "291/291 [==============================] - 0s 1ms/step - loss: 4.3322 - val_loss: 4.3933\n",
  2582. "Epoch 4/5\n",
  2583. "291/291 [==============================] - 0s 1ms/step - loss: 3.9754 - val_loss: 4.0465\n",
  2584. "Epoch 5/5\n",
  2585. "291/291 [==============================] - 0s 1ms/step - loss: 3.6573 - val_loss: 3.7380\n",
  2586. "73/73 [==============================] - 0s 752us/step - loss: 3.6584\n",
  2587. "Epoch 1/5\n",
  2588. "291/291 [==============================] - 0s 1ms/step - loss: 3.9606 - val_loss: 3.9413\n",
  2589. "Epoch 2/5\n",
  2590. "291/291 [==============================] - 0s 1ms/step - loss: 3.5617 - val_loss: 3.5537\n",
  2591. "Epoch 3/5\n",
  2592. "291/291 [==============================] - 0s 1ms/step - loss: 3.2133 - val_loss: 3.2285\n",
  2593. "Epoch 4/5\n",
  2594. "291/291 [==============================] - 0s 1ms/step - loss: 2.9256 - val_loss: 2.9599\n",
  2595. "Epoch 5/5\n",
  2596. "291/291 [==============================] - 0s 1ms/step - loss: 2.6898 - val_loss: 2.7389\n",
  2597. "73/73 [==============================] - 0s 802us/step - loss: 2.6559\n",
  2598. "Epoch 1/5\n",
  2599. "291/291 [==============================] - 0s 1ms/step - loss: 3.6546 - val_loss: 3.3819\n",
  2600. "Epoch 2/5\n",
  2601. "291/291 [==============================] - 0s 1ms/step - loss: 2.7832 - val_loss: 2.5997\n",
  2602. "Epoch 3/5\n",
  2603. "291/291 [==============================] - 0s 1ms/step - loss: 2.1379 - val_loss: 2.0510\n",
  2604. "Epoch 4/5\n",
  2605. "291/291 [==============================] - 0s 1ms/step - loss: 1.7024 - val_loss: 1.6962\n",
  2606. "Epoch 5/5\n",
  2607. "291/291 [==============================] - 0s 1ms/step - loss: 1.4298 - val_loss: 1.4806\n",
  2608. "73/73 [==============================] - 0s 768us/step - loss: 1.3719\n",
  2609. "Epoch 1/5\n",
  2610. "291/291 [==============================] - 0s 1ms/step - loss: 4.7044 - val_loss: 4.7553\n",
  2611. "Epoch 2/5\n",
  2612. "291/291 [==============================] - 0s 1ms/step - loss: 4.4115 - val_loss: 4.4583\n",
  2613. "Epoch 3/5\n",
  2614. "291/291 [==============================] - 0s 1ms/step - loss: 4.1317 - val_loss: 4.1766\n",
  2615. "Epoch 4/5\n",
  2616. "291/291 [==============================] - 0s 1ms/step - loss: 3.8679 - val_loss: 3.9124\n",
  2617. "Epoch 5/5\n",
  2618. "291/291 [==============================] - 0s 1ms/step - loss: 3.6213 - val_loss: 3.6679\n",
  2619. "73/73 [==============================] - 0s 789us/step - loss: 3.5700\n",
  2620. "Epoch 1/5\n",
  2621. "291/291 [==============================] - 0s 1ms/step - loss: 6.4483 - val_loss: 6.3384\n",
  2622. "Epoch 2/5\n",
  2623. "291/291 [==============================] - 0s 1ms/step - loss: 5.7673 - val_loss: 5.7199\n",
  2624. "Epoch 3/5\n",
  2625. "291/291 [==============================] - 0s 1ms/step - loss: 5.2470 - val_loss: 5.2353\n",
  2626. "Epoch 4/5\n",
  2627. "291/291 [==============================] - 0s 1ms/step - loss: 4.8259 - val_loss: 4.8348\n",
  2628. "Epoch 5/5\n",
  2629. "291/291 [==============================] - 0s 1ms/step - loss: 4.4692 - val_loss: 4.4917\n",
  2630. "73/73 [==============================] - 0s 712us/step - loss: 4.1401\n",
  2631. "Epoch 1/5\n",
  2632. "291/291 [==============================] - 0s 1ms/step - loss: 5.1407 - val_loss: 5.2478\n",
  2633. "Epoch 2/5\n",
  2634. "291/291 [==============================] - 0s 1ms/step - loss: 4.8255 - val_loss: 4.9199\n",
  2635. "Epoch 3/5\n",
  2636. "291/291 [==============================] - 0s 1ms/step - loss: 4.5027 - val_loss: 4.5807\n",
  2637. "Epoch 4/5\n",
  2638. "291/291 [==============================] - 0s 1ms/step - loss: 4.1684 - val_loss: 4.2296\n",
  2639. "Epoch 5/5\n",
  2640. "291/291 [==============================] - 0s 1ms/step - loss: 3.8238 - val_loss: 3.8692\n",
  2641. "73/73 [==============================] - 0s 756us/step - loss: 3.7946\n",
  2642. "Epoch 1/5\n",
  2643. "291/291 [==============================] - 0s 1ms/step - loss: 3.8164 - val_loss: 2.2268\n",
  2644. "Epoch 2/5\n",
  2645. "291/291 [==============================] - 0s 1ms/step - loss: 1.5699 - val_loss: 1.4041\n",
  2646. "Epoch 3/5\n",
  2647. "291/291 [==============================] - 0s 1ms/step - loss: 1.2547 - val_loss: 1.2807\n",
  2648. "Epoch 4/5\n",
  2649. "291/291 [==============================] - 0s 1ms/step - loss: 1.1635 - val_loss: 1.1822\n",
  2650. "Epoch 5/5\n",
  2651. "291/291 [==============================] - 0s 1ms/step - loss: 1.0655 - val_loss: 1.0599\n",
  2652. "73/73 [==============================] - 0s 769us/step - loss: 0.9810\n",
  2653. "Epoch 1/5\n",
  2654. "291/291 [==============================] - 0s 1ms/step - loss: 2.1112 - val_loss: 0.9361\n",
  2655. "Epoch 2/5\n",
  2656. "291/291 [==============================] - 0s 1ms/step - loss: 0.9073 - val_loss: 0.8002\n",
  2657. "Epoch 3/5\n",
  2658. "291/291 [==============================] - 0s 1ms/step - loss: 0.7622 - val_loss: 0.7562\n",
  2659. "Epoch 4/5\n",
  2660. "291/291 [==============================] - 0s 1ms/step - loss: 0.7229 - val_loss: 0.7316\n",
  2661. "Epoch 5/5\n",
  2662. "291/291 [==============================] - 0s 1ms/step - loss: 0.6987 - val_loss: 0.7142\n",
  2663. "73/73 [==============================] - 0s 736us/step - loss: 0.6631\n",
  2664. "Epoch 1/5\n",
  2665. "291/291 [==============================] - 0s 1ms/step - loss: 4.0735 - val_loss: 2.7662\n",
  2666. "Epoch 2/5\n",
  2667. "291/291 [==============================] - 0s 1ms/step - loss: 2.0523 - val_loss: 1.8406\n",
  2668. "Epoch 3/5\n",
  2669. "291/291 [==============================] - 0s 1ms/step - loss: 1.5535 - val_loss: 1.5643\n",
  2670. "Epoch 4/5\n",
  2671. "291/291 [==============================] - 0s 1ms/step - loss: 1.3983 - val_loss: 1.4638\n",
  2672. "Epoch 5/5\n",
  2673. "291/291 [==============================] - 0s 1ms/step - loss: 1.3375 - val_loss: 1.4213\n",
  2674. "73/73 [==============================] - 0s 733us/step - loss: 1.3532\n",
  2675. "Epoch 1/5\n",
  2676. "291/291 [==============================] - 0s 1ms/step - loss: 3.1647 - val_loss: 1.2594\n",
  2677. "Epoch 2/5\n",
  2678. "291/291 [==============================] - 0s 1ms/step - loss: 0.8901 - val_loss: 0.7461\n",
  2679. "Epoch 3/5\n",
  2680. "291/291 [==============================] - 0s 1ms/step - loss: 0.7112 - val_loss: 0.6847\n",
  2681. "Epoch 4/5\n",
  2682. "291/291 [==============================] - 0s 1ms/step - loss: 0.6512 - val_loss: 0.6600\n",
  2683. "Epoch 5/5\n",
  2684. "291/291 [==============================] - 0s 1ms/step - loss: 0.6202 - val_loss: 0.6429\n",
  2685. "73/73 [==============================] - 0s 760us/step - loss: 0.6105\n",
  2686. "Epoch 1/5\n",
  2687. "291/291 [==============================] - 0s 1ms/step - loss: 3.6196 - val_loss: 2.1105\n",
  2688. "Epoch 2/5\n",
  2689. "291/291 [==============================] - 0s 1ms/step - loss: 1.2773 - val_loss: 1.0128\n",
  2690. "Epoch 3/5\n",
  2691. "291/291 [==============================] - 0s 1ms/step - loss: 0.8684 - val_loss: 0.8572\n",
  2692. "Epoch 4/5\n",
  2693. "291/291 [==============================] - 0s 1ms/step - loss: 0.7730 - val_loss: 0.7800\n",
  2694. "Epoch 5/5\n",
  2695. "291/291 [==============================] - 0s 1ms/step - loss: 0.7122 - val_loss: 0.7340\n",
  2696. "73/73 [==============================] - 0s 767us/step - loss: 0.7272\n",
  2697. "Epoch 1/5\n",
  2698. "291/291 [==============================] - 0s 1ms/step - loss: 2.1645 - val_loss: 1.4047\n",
  2699. "Epoch 2/5\n",
  2700. "291/291 [==============================] - 0s 1ms/step - loss: 1.3185 - val_loss: 1.3768\n",
  2701. "Epoch 3/5\n",
  2702. "291/291 [==============================] - 0s 1ms/step - loss: 1.3059 - val_loss: 1.3599\n",
  2703. "Epoch 4/5\n",
  2704. "291/291 [==============================] - 0s 1ms/step - loss: 1.2536 - val_loss: 1.2030\n",
  2705. "Epoch 5/5\n",
  2706. "291/291 [==============================] - 0s 1ms/step - loss: 0.9006 - val_loss: 0.7378\n",
  2707. "73/73 [==============================] - 0s 745us/step - loss: 0.6701\n",
  2708. "Epoch 1/5\n",
  2709. "291/291 [==============================] - 0s 1ms/step - loss: 1.8391 - val_loss: 1.0291\n",
  2710. "Epoch 2/5\n",
  2711. "291/291 [==============================] - 0s 1ms/step - loss: 0.7378 - val_loss: 0.6757\n",
  2712. "Epoch 3/5\n",
  2713. "291/291 [==============================] - 0s 1ms/step - loss: 0.6283 - val_loss: 0.6389\n",
  2714. "Epoch 4/5\n",
  2715. "291/291 [==============================] - 0s 1ms/step - loss: 0.5868 - val_loss: 0.6022\n",
  2716. "Epoch 5/5\n",
  2717. "291/291 [==============================] - 0s 1ms/step - loss: 0.5633 - val_loss: 0.5759\n",
  2718. "73/73 [==============================] - 0s 725us/step - loss: 0.5503\n",
  2719. "Epoch 1/5\n",
  2720. "291/291 [==============================] - 0s 1ms/step - loss: 1.3896 - val_loss: 0.8116\n",
  2721. "Epoch 2/5\n",
  2722. "291/291 [==============================] - 0s 1ms/step - loss: 0.5921 - val_loss: 0.5399\n",
  2723. "Epoch 3/5\n",
  2724. "291/291 [==============================] - 0s 1ms/step - loss: 0.4832 - val_loss: 0.4902\n"
  2725. ]
  2726. },
  2727. {
  2728. "name": "stdout",
  2729. "output_type": "stream",
  2730. "text": [
  2731. "Epoch 4/5\n",
  2732. "291/291 [==============================] - 0s 1ms/step - loss: 0.4633 - val_loss: 0.4758\n",
  2733. "Epoch 5/5\n",
  2734. "291/291 [==============================] - 0s 1ms/step - loss: 0.4485 - val_loss: 0.4555\n",
  2735. "73/73 [==============================] - 0s 728us/step - loss: 0.4475\n",
  2736. "Epoch 1/5\n",
  2737. "291/291 [==============================] - 0s 1ms/step - loss: 1.8986 - val_loss: 1.5373\n",
  2738. "Epoch 2/5\n",
  2739. "291/291 [==============================] - 0s 1ms/step - loss: 1.2718 - val_loss: 1.2268\n",
  2740. "Epoch 3/5\n",
  2741. "291/291 [==============================] - 0s 1ms/step - loss: 1.1360 - val_loss: 1.1021\n",
  2742. "Epoch 4/5\n",
  2743. "291/291 [==============================] - 0s 1ms/step - loss: 0.9994 - val_loss: 0.9501\n",
  2744. "Epoch 5/5\n",
  2745. "291/291 [==============================] - 0s 1ms/step - loss: 0.8440 - val_loss: 0.7985\n",
  2746. "73/73 [==============================] - 0s 743us/step - loss: 0.7443\n",
  2747. "Epoch 1/5\n",
  2748. "291/291 [==============================] - 0s 1ms/step - loss: 1.8063 - val_loss: 1.7672\n",
  2749. "Epoch 2/5\n",
  2750. "291/291 [==============================] - 1s 2ms/step - loss: 1.2334 - val_loss: 1.2109\n",
  2751. "Epoch 3/5\n",
  2752. "291/291 [==============================] - 0s 1ms/step - loss: 0.9533 - val_loss: 0.8573\n",
  2753. "Epoch 4/5\n",
  2754. "291/291 [==============================] - 0s 1ms/step - loss: 0.7108 - val_loss: 0.6874\n",
  2755. "Epoch 5/5\n",
  2756. "291/291 [==============================] - 0s 1ms/step - loss: 0.6012 - val_loss: 0.5960\n",
  2757. "73/73 [==============================] - 0s 742us/step - loss: 0.5926\n",
  2758. "Epoch 1/5\n",
  2759. "291/291 [==============================] - 0s 1ms/step - loss: 1.1771 - val_loss: 0.8257\n",
  2760. "Epoch 2/5\n",
  2761. "291/291 [==============================] - 0s 1ms/step - loss: 0.6659 - val_loss: 0.5984\n",
  2762. "Epoch 3/5\n",
  2763. "291/291 [==============================] - 0s 1ms/step - loss: 0.5285 - val_loss: 0.5065\n",
  2764. "Epoch 4/5\n",
  2765. "291/291 [==============================] - 0s 1ms/step - loss: 0.4763 - val_loss: 0.4782\n",
  2766. "Epoch 5/5\n",
  2767. "291/291 [==============================] - 0s 1ms/step - loss: 0.4529 - val_loss: 0.4741\n",
  2768. "73/73 [==============================] - 0s 755us/step - loss: 0.4219\n",
  2769. "Epoch 1/5\n",
  2770. "291/291 [==============================] - 0s 1ms/step - loss: 1.5939 - val_loss: 0.9120\n",
  2771. "Epoch 2/5\n",
  2772. "291/291 [==============================] - 0s 1ms/step - loss: 0.6828 - val_loss: 0.6495\n",
  2773. "Epoch 3/5\n",
  2774. "291/291 [==============================] - 0s 1ms/step - loss: 0.5929 - val_loss: 0.6132\n",
  2775. "Epoch 4/5\n",
  2776. "291/291 [==============================] - 0s 1ms/step - loss: 0.5457 - val_loss: 0.5562\n",
  2777. "Epoch 5/5\n",
  2778. "291/291 [==============================] - 0s 1ms/step - loss: 0.5004 - val_loss: 0.5157\n",
  2779. "73/73 [==============================] - 0s 774us/step - loss: 0.4834\n",
  2780. "Epoch 1/5\n",
  2781. "291/291 [==============================] - 0s 1ms/step - loss: 0.9915 - val_loss: 0.6667\n",
  2782. "Epoch 2/5\n",
  2783. "291/291 [==============================] - 0s 1ms/step - loss: 0.5703 - val_loss: 0.5541\n",
  2784. "Epoch 3/5\n",
  2785. "291/291 [==============================] - 0s 1ms/step - loss: 0.4845 - val_loss: 0.4962\n",
  2786. "Epoch 4/5\n",
  2787. "291/291 [==============================] - 0s 1ms/step - loss: 0.4528 - val_loss: 0.4982\n",
  2788. "Epoch 5/5\n",
  2789. "291/291 [==============================] - 0s 1ms/step - loss: 0.4361 - val_loss: 0.4585\n",
  2790. "73/73 [==============================] - 0s 729us/step - loss: 0.4401\n",
  2791. "Epoch 1/5\n",
  2792. "291/291 [==============================] - 0s 1ms/step - loss: 1.0619 - val_loss: 0.7947\n",
  2793. "Epoch 2/5\n",
  2794. "291/291 [==============================] - 0s 1ms/step - loss: 0.6585 - val_loss: 0.6640\n",
  2795. "Epoch 3/5\n",
  2796. "291/291 [==============================] - 0s 1ms/step - loss: 0.5914 - val_loss: 0.6046\n",
  2797. "Epoch 4/5\n",
  2798. "291/291 [==============================] - 0s 1ms/step - loss: 0.5141 - val_loss: 0.5454\n",
  2799. "Epoch 5/5\n",
  2800. "291/291 [==============================] - 0s 1ms/step - loss: 0.4708 - val_loss: 0.4902\n",
  2801. "73/73 [==============================] - 0s 736us/step - loss: 0.4912\n",
  2802. "Epoch 1/5\n",
  2803. "291/291 [==============================] - 0s 1ms/step - loss: 1.6543 - val_loss: 1.3844\n",
  2804. "Epoch 2/5\n",
  2805. "291/291 [==============================] - 0s 1ms/step - loss: 1.3007 - val_loss: 1.3839\n",
  2806. "Epoch 3/5\n",
  2807. "291/291 [==============================] - 0s 1ms/step - loss: 1.2982 - val_loss: 1.3854\n",
  2808. "Epoch 4/5\n",
  2809. "291/291 [==============================] - 0s 1ms/step - loss: 1.2926 - val_loss: 1.3732\n",
  2810. "Epoch 5/5\n",
  2811. "291/291 [==============================] - 0s 1ms/step - loss: 1.2319 - val_loss: 1.1939\n",
  2812. "73/73 [==============================] - 0s 780us/step - loss: 1.1303\n",
  2813. "Epoch 1/5\n",
  2814. "291/291 [==============================] - 0s 1ms/step - loss: 5.7917 - val_loss: 5.5066\n",
  2815. "Epoch 2/5\n",
  2816. "291/291 [==============================] - 0s 1ms/step - loss: 4.8560 - val_loss: 4.7270\n",
  2817. "Epoch 3/5\n",
  2818. "291/291 [==============================] - 0s 1ms/step - loss: 4.1973 - val_loss: 4.1366\n",
  2819. "Epoch 4/5\n",
  2820. "291/291 [==============================] - 0s 1ms/step - loss: 3.6845 - val_loss: 3.6708\n",
  2821. "Epoch 5/5\n",
  2822. "291/291 [==============================] - 0s 1ms/step - loss: 3.2759 - val_loss: 3.2976\n",
  2823. "73/73 [==============================] - 0s 701us/step - loss: 2.9728\n",
  2824. "Epoch 1/5\n",
  2825. "291/291 [==============================] - 0s 1ms/step - loss: 4.9083 - val_loss: 4.7492\n",
  2826. "Epoch 2/5\n",
  2827. "291/291 [==============================] - 0s 1ms/step - loss: 4.0715 - val_loss: 3.9193\n",
  2828. "Epoch 3/5\n",
  2829. "291/291 [==============================] - 0s 1ms/step - loss: 3.2992 - val_loss: 3.1821\n",
  2830. "Epoch 4/5\n",
  2831. "291/291 [==============================] - 0s 1ms/step - loss: 2.6500 - val_loss: 2.5896\n",
  2832. "Epoch 5/5\n",
  2833. "291/291 [==============================] - 0s 1ms/step - loss: 2.1446 - val_loss: 2.1322\n",
  2834. "73/73 [==============================] - 0s 777us/step - loss: 1.9510\n",
  2835. "Epoch 1/5\n",
  2836. "291/291 [==============================] - 0s 1ms/step - loss: 4.8912 - val_loss: 4.6190\n",
  2837. "Epoch 2/5\n",
  2838. "291/291 [==============================] - 0s 1ms/step - loss: 4.0104 - val_loss: 3.7542\n",
  2839. "Epoch 3/5\n",
  2840. "291/291 [==============================] - 0s 1ms/step - loss: 3.2130 - val_loss: 2.9729\n",
  2841. "Epoch 4/5\n",
  2842. "291/291 [==============================] - 0s 1ms/step - loss: 2.5034 - val_loss: 2.3048\n",
  2843. "Epoch 5/5\n",
  2844. "291/291 [==============================] - 0s 1ms/step - loss: 1.9377 - val_loss: 1.8098\n",
  2845. "73/73 [==============================] - 0s 774us/step - loss: 1.7352\n",
  2846. "Epoch 1/5\n",
  2847. "291/291 [==============================] - 0s 1ms/step - loss: 4.5316 - val_loss: 4.3711\n",
  2848. "Epoch 2/5\n",
  2849. "291/291 [==============================] - 0s 1ms/step - loss: 3.6231 - val_loss: 3.5695\n",
  2850. "Epoch 3/5\n",
  2851. "291/291 [==============================] - 0s 1ms/step - loss: 2.9017 - val_loss: 2.9258\n",
  2852. "Epoch 4/5\n",
  2853. "291/291 [==============================] - 0s 1ms/step - loss: 2.3273 - val_loss: 2.4130\n",
  2854. "Epoch 5/5\n",
  2855. "291/291 [==============================] - 0s 1ms/step - loss: 1.8888 - val_loss: 2.0205\n",
  2856. "73/73 [==============================] - 0s 728us/step - loss: 2.0817\n",
  2857. "Epoch 1/5\n",
  2858. "291/291 [==============================] - 0s 1ms/step - loss: 4.8721 - val_loss: 4.6714\n",
  2859. "Epoch 2/5\n",
  2860. "291/291 [==============================] - 0s 1ms/step - loss: 4.0584 - val_loss: 3.9194\n",
  2861. "Epoch 3/5\n",
  2862. "291/291 [==============================] - 0s 1ms/step - loss: 3.4054 - val_loss: 3.3343\n",
  2863. "Epoch 4/5\n",
  2864. "291/291 [==============================] - 0s 1ms/step - loss: 2.9067 - val_loss: 2.8892\n",
  2865. "Epoch 5/5\n",
  2866. "291/291 [==============================] - 0s 1ms/step - loss: 2.5348 - val_loss: 2.5585\n",
  2867. "73/73 [==============================] - 0s 763us/step - loss: 2.5073\n",
  2868. "Epoch 1/5\n",
  2869. "291/291 [==============================] - 0s 1ms/step - loss: 4.7243 - val_loss: 4.7440\n",
  2870. "Epoch 2/5\n",
  2871. "291/291 [==============================] - 0s 1ms/step - loss: 4.3244 - val_loss: 4.3444\n",
  2872. "Epoch 3/5\n",
  2873. "291/291 [==============================] - 0s 1ms/step - loss: 3.9538 - val_loss: 3.9732\n",
  2874. "Epoch 4/5\n",
  2875. "291/291 [==============================] - 0s 1ms/step - loss: 3.6101 - val_loss: 3.6294\n",
  2876. "Epoch 5/5\n",
  2877. "291/291 [==============================] - 0s 1ms/step - loss: 3.2937 - val_loss: 3.3129\n",
  2878. "73/73 [==============================] - 0s 705us/step - loss: 2.9149\n",
  2879. "Epoch 1/5\n",
  2880. "291/291 [==============================] - 0s 1ms/step - loss: 5.0703 - val_loss: 5.0427\n",
  2881. "Epoch 2/5\n",
  2882. "291/291 [==============================] - 0s 1ms/step - loss: 4.5272 - val_loss: 4.5287\n",
  2883. "Epoch 3/5\n",
  2884. "291/291 [==============================] - 0s 1ms/step - loss: 4.0533 - val_loss: 4.0586\n",
  2885. "Epoch 4/5\n",
  2886. "291/291 [==============================] - 0s 1ms/step - loss: 3.6127 - val_loss: 3.6163\n",
  2887. "Epoch 5/5\n",
  2888. "291/291 [==============================] - 0s 1ms/step - loss: 3.1983 - val_loss: 3.1995\n",
  2889. "73/73 [==============================] - 0s 760us/step - loss: 3.0800\n",
  2890. "Epoch 1/5\n"
  2891. ]
  2892. },
  2893. {
  2894. "name": "stdout",
  2895. "output_type": "stream",
  2896. "text": [
  2897. "291/291 [==============================] - 0s 1ms/step - loss: 4.6851 - val_loss: 4.5879\n",
  2898. "Epoch 2/5\n",
  2899. "291/291 [==============================] - 0s 1ms/step - loss: 4.1190 - val_loss: 4.0380\n",
  2900. "Epoch 3/5\n",
  2901. "291/291 [==============================] - 0s 1ms/step - loss: 3.6336 - val_loss: 3.5667\n",
  2902. "Epoch 4/5\n",
  2903. "291/291 [==============================] - 0s 1ms/step - loss: 3.2192 - val_loss: 3.1651\n",
  2904. "Epoch 5/5\n",
  2905. "291/291 [==============================] - 0s 1ms/step - loss: 2.8675 - val_loss: 2.8312\n",
  2906. "73/73 [==============================] - 0s 755us/step - loss: 2.6997\n",
  2907. "Epoch 1/5\n",
  2908. "291/291 [==============================] - 0s 1ms/step - loss: 5.4760 - val_loss: 5.4169\n",
  2909. "Epoch 2/5\n",
  2910. "291/291 [==============================] - 0s 1ms/step - loss: 4.9283 - val_loss: 4.8717\n",
  2911. "Epoch 3/5\n",
  2912. "291/291 [==============================] - 0s 1ms/step - loss: 4.4226 - val_loss: 4.3805\n",
  2913. "Epoch 4/5\n",
  2914. "291/291 [==============================] - 0s 1ms/step - loss: 3.9710 - val_loss: 3.9409\n",
  2915. "Epoch 5/5\n",
  2916. "291/291 [==============================] - 0s 1ms/step - loss: 3.5678 - val_loss: 3.5484\n",
  2917. "73/73 [==============================] - 0s 777us/step - loss: 3.2222\n",
  2918. "Epoch 1/5\n",
  2919. "291/291 [==============================] - 0s 1ms/step - loss: 5.8757 - val_loss: 5.9164\n",
  2920. "Epoch 2/5\n",
  2921. "291/291 [==============================] - 0s 1ms/step - loss: 5.4443 - val_loss: 5.5145\n",
  2922. "Epoch 3/5\n",
  2923. "291/291 [==============================] - 0s 1ms/step - loss: 5.0767 - val_loss: 5.1648\n",
  2924. "Epoch 4/5\n",
  2925. "291/291 [==============================] - 0s 1ms/step - loss: 4.7543 - val_loss: 4.8552\n",
  2926. "Epoch 5/5\n",
  2927. "291/291 [==============================] - 0s 1ms/step - loss: 4.4664 - val_loss: 4.5752\n",
  2928. "73/73 [==============================] - 0s 847us/step - loss: 4.4622\n",
  2929. "Epoch 1/5\n",
  2930. "291/291 [==============================] - 0s 1ms/step - loss: 1.8548 - val_loss: 1.0768\n",
  2931. "Epoch 2/5\n",
  2932. "291/291 [==============================] - 0s 1ms/step - loss: 0.9701 - val_loss: 0.8759\n",
  2933. "Epoch 3/5\n",
  2934. "291/291 [==============================] - 0s 1ms/step - loss: 0.8118 - val_loss: 0.8077\n",
  2935. "Epoch 4/5\n",
  2936. "291/291 [==============================] - 0s 1ms/step - loss: 0.7587 - val_loss: 0.7710\n",
  2937. "Epoch 5/5\n",
  2938. "291/291 [==============================] - 0s 1ms/step - loss: 0.7263 - val_loss: 0.7434\n",
  2939. "73/73 [==============================] - 0s 796us/step - loss: 0.6879\n",
  2940. "Epoch 1/5\n",
  2941. "291/291 [==============================] - 0s 1ms/step - loss: 2.3203 - val_loss: 1.2681\n",
  2942. "Epoch 2/5\n",
  2943. "291/291 [==============================] - 0s 1ms/step - loss: 1.0524 - val_loss: 0.9230\n",
  2944. "Epoch 3/5\n",
  2945. "291/291 [==============================] - 0s 1ms/step - loss: 0.8676 - val_loss: 0.8473\n",
  2946. "Epoch 4/5\n",
  2947. "291/291 [==============================] - 0s 1ms/step - loss: 0.8041 - val_loss: 0.8130\n",
  2948. "Epoch 5/5\n",
  2949. "291/291 [==============================] - 0s 1ms/step - loss: 0.7672 - val_loss: 0.7900\n",
  2950. "73/73 [==============================] - 0s 786us/step - loss: 0.7414\n",
  2951. "Epoch 1/5\n",
  2952. "291/291 [==============================] - 0s 1ms/step - loss: 2.0079 - val_loss: 1.2676\n",
  2953. "Epoch 2/5\n",
  2954. "291/291 [==============================] - 0s 1ms/step - loss: 0.9810 - val_loss: 0.8294\n",
  2955. "Epoch 3/5\n",
  2956. "291/291 [==============================] - 0s 1ms/step - loss: 0.7474 - val_loss: 0.7407\n",
  2957. "Epoch 4/5\n",
  2958. "291/291 [==============================] - 0s 2ms/step - loss: 0.6849 - val_loss: 0.7044\n",
  2959. "Epoch 5/5\n",
  2960. "291/291 [==============================] - 0s 1ms/step - loss: 0.6510 - val_loss: 0.6769\n",
  2961. "73/73 [==============================] - 0s 759us/step - loss: 0.6535\n",
  2962. "Epoch 1/5\n",
  2963. "291/291 [==============================] - 0s 1ms/step - loss: 2.5609 - val_loss: 1.0983\n",
  2964. "Epoch 2/5\n",
  2965. "291/291 [==============================] - 0s 1ms/step - loss: 0.9663 - val_loss: 0.7983\n",
  2966. "Epoch 3/5\n",
  2967. "291/291 [==============================] - 0s 1ms/step - loss: 0.7570 - val_loss: 0.7465\n",
  2968. "Epoch 4/5\n",
  2969. "291/291 [==============================] - 0s 1ms/step - loss: 0.7131 - val_loss: 0.7205\n",
  2970. "Epoch 5/5\n",
  2971. "291/291 [==============================] - 0s 1ms/step - loss: 0.6837 - val_loss: 0.6961\n",
  2972. "73/73 [==============================] - 0s 719us/step - loss: 0.6847\n",
  2973. "Epoch 1/5\n",
  2974. "291/291 [==============================] - 0s 1ms/step - loss: 3.5413 - val_loss: 1.5157\n",
  2975. "Epoch 2/5\n",
  2976. "291/291 [==============================] - 0s 1ms/step - loss: 0.9893 - val_loss: 0.8520\n",
  2977. "Epoch 3/5\n",
  2978. "291/291 [==============================] - 0s 1ms/step - loss: 0.7465 - val_loss: 0.7559\n",
  2979. "Epoch 4/5\n",
  2980. "291/291 [==============================] - 0s 1ms/step - loss: 0.6857 - val_loss: 0.7235\n",
  2981. "Epoch 5/5\n",
  2982. "291/291 [==============================] - 0s 1ms/step - loss: 0.6615 - val_loss: 0.7037\n",
  2983. "73/73 [==============================] - 0s 761us/step - loss: 0.6941\n",
  2984. "Epoch 1/5\n",
  2985. "291/291 [==============================] - 0s 1ms/step - loss: 1.4185 - val_loss: 0.6902\n",
  2986. "Epoch 2/5\n",
  2987. "291/291 [==============================] - 0s 1ms/step - loss: 0.5879 - val_loss: 0.5830\n",
  2988. "Epoch 3/5\n",
  2989. "291/291 [==============================] - 0s 1ms/step - loss: 0.5355 - val_loss: 0.5440\n",
  2990. "Epoch 4/5\n",
  2991. "291/291 [==============================] - 0s 1ms/step - loss: 0.5050 - val_loss: 0.5140\n",
  2992. "Epoch 5/5\n",
  2993. "291/291 [==============================] - 0s 1ms/step - loss: 0.4823 - val_loss: 0.4859\n",
  2994. "73/73 [==============================] - 0s 758us/step - loss: 0.4416\n",
  2995. "Epoch 1/5\n",
  2996. "291/291 [==============================] - 0s 1ms/step - loss: 1.3205 - val_loss: 0.7835\n",
  2997. "Epoch 2/5\n",
  2998. "291/291 [==============================] - 0s 1ms/step - loss: 0.6445 - val_loss: 0.6327\n",
  2999. "Epoch 3/5\n",
  3000. "291/291 [==============================] - 0s 1ms/step - loss: 0.5660 - val_loss: 0.5760\n",
  3001. "Epoch 4/5\n",
  3002. "291/291 [==============================] - 0s 1ms/step - loss: 0.5259 - val_loss: 0.5444\n",
  3003. "Epoch 5/5\n",
  3004. "291/291 [==============================] - 0s 1ms/step - loss: 0.5085 - val_loss: 0.5290\n",
  3005. "73/73 [==============================] - 0s 737us/step - loss: 0.4999\n",
  3006. "Epoch 1/5\n",
  3007. "291/291 [==============================] - 0s 1ms/step - loss: 1.3146 - val_loss: 0.7413\n",
  3008. "Epoch 2/5\n",
  3009. "291/291 [==============================] - 0s 1ms/step - loss: 0.6855 - val_loss: 0.7386\n",
  3010. "Epoch 3/5\n",
  3011. "291/291 [==============================] - 0s 1ms/step - loss: 0.5709 - val_loss: 0.5905\n",
  3012. "Epoch 4/5\n",
  3013. "291/291 [==============================] - 0s 1ms/step - loss: 0.5327 - val_loss: 0.5569\n",
  3014. "Epoch 5/5\n",
  3015. "291/291 [==============================] - 0s 1ms/step - loss: 0.5035 - val_loss: 0.5270\n",
  3016. "73/73 [==============================] - 0s 795us/step - loss: 0.4928\n",
  3017. "Epoch 1/5\n",
  3018. "291/291 [==============================] - 0s 1ms/step - loss: 1.5669 - val_loss: 0.7950\n",
  3019. "Epoch 2/5\n",
  3020. "291/291 [==============================] - 0s 1ms/step - loss: 0.6954 - val_loss: 0.6713\n",
  3021. "Epoch 3/5\n",
  3022. "291/291 [==============================] - 0s 1ms/step - loss: 0.5902 - val_loss: 0.6095\n",
  3023. "Epoch 4/5\n",
  3024. "291/291 [==============================] - 0s 1ms/step - loss: 0.5351 - val_loss: 0.5577\n",
  3025. "Epoch 5/5\n",
  3026. "291/291 [==============================] - 0s 1ms/step - loss: 0.5012 - val_loss: 0.5367\n",
  3027. "73/73 [==============================] - 0s 723us/step - loss: 0.5204\n",
  3028. "Epoch 1/5\n",
  3029. "291/291 [==============================] - 0s 1ms/step - loss: 1.4189 - val_loss: 0.8067\n",
  3030. "Epoch 2/5\n",
  3031. "291/291 [==============================] - 0s 1ms/step - loss: 0.6699 - val_loss: 0.6762\n",
  3032. "Epoch 3/5\n",
  3033. "291/291 [==============================] - 0s 1ms/step - loss: 0.5838 - val_loss: 0.5925\n",
  3034. "Epoch 4/5\n",
  3035. "291/291 [==============================] - 0s 1ms/step - loss: 0.5286 - val_loss: 0.5424\n",
  3036. "Epoch 5/5\n",
  3037. "291/291 [==============================] - 0s 1ms/step - loss: 0.4895 - val_loss: 0.5176\n",
  3038. "73/73 [==============================] - 0s 781us/step - loss: 0.5051\n",
  3039. "Epoch 1/5\n",
  3040. "291/291 [==============================] - 0s 1ms/step - loss: 0.8700 - val_loss: 0.6013\n",
  3041. "Epoch 2/5\n",
  3042. "291/291 [==============================] - 0s 1ms/step - loss: 0.5352 - val_loss: 0.5694\n",
  3043. "Epoch 3/5\n",
  3044. "291/291 [==============================] - 0s 1ms/step - loss: 0.4747 - val_loss: 0.4876\n",
  3045. "Epoch 4/5\n",
  3046. "291/291 [==============================] - 0s 1ms/step - loss: 0.5306 - val_loss: 0.6161\n",
  3047. "Epoch 5/5\n",
  3048. "291/291 [==============================] - 0s 1ms/step - loss: 0.5256 - val_loss: 0.4948\n",
  3049. "73/73 [==============================] - 0s 698us/step - loss: 0.4264\n",
  3050. "Epoch 1/5\n",
  3051. "291/291 [==============================] - 0s 1ms/step - loss: 1.3430 - val_loss: 0.9737\n",
  3052. "Epoch 2/5\n",
  3053. "291/291 [==============================] - 0s 1ms/step - loss: 0.6794 - val_loss: 0.6473\n",
  3054. "Epoch 3/5\n",
  3055. "291/291 [==============================] - 0s 1ms/step - loss: 0.5394 - val_loss: 0.5385\n"
  3056. ]
  3057. },
  3058. {
  3059. "name": "stdout",
  3060. "output_type": "stream",
  3061. "text": [
  3062. "Epoch 4/5\n",
  3063. "291/291 [==============================] - 0s 1ms/step - loss: 0.4644 - val_loss: 0.4713\n",
  3064. "Epoch 5/5\n",
  3065. "291/291 [==============================] - 0s 1ms/step - loss: 0.4255 - val_loss: 0.4483\n",
  3066. "73/73 [==============================] - 0s 770us/step - loss: 0.4232\n",
  3067. "Epoch 1/5\n",
  3068. "291/291 [==============================] - 1s 2ms/step - loss: 0.9522 - val_loss: 0.5819\n",
  3069. "Epoch 2/5\n",
  3070. "291/291 [==============================] - 0s 1ms/step - loss: 0.5049 - val_loss: 0.5152\n",
  3071. "Epoch 3/5\n",
  3072. "291/291 [==============================] - 0s 1ms/step - loss: 0.4507 - val_loss: 0.4787\n",
  3073. "Epoch 4/5\n",
  3074. "291/291 [==============================] - 0s 1ms/step - loss: 0.4191 - val_loss: 0.4356\n",
  3075. "Epoch 5/5\n",
  3076. "291/291 [==============================] - 0s 1ms/step - loss: 0.4099 - val_loss: 0.4782\n",
  3077. "73/73 [==============================] - 0s 769us/step - loss: 0.4518\n",
  3078. "Epoch 1/5\n",
  3079. "291/291 [==============================] - 0s 1ms/step - loss: 0.8503 - val_loss: 0.6028\n",
  3080. "Epoch 2/5\n",
  3081. "291/291 [==============================] - 0s 1ms/step - loss: 0.5124 - val_loss: 0.4812\n",
  3082. "Epoch 3/5\n",
  3083. "291/291 [==============================] - 0s 1ms/step - loss: 0.4491 - val_loss: 0.4503\n",
  3084. "Epoch 4/5\n",
  3085. "291/291 [==============================] - 0s 1ms/step - loss: 0.4296 - val_loss: 0.4807\n",
  3086. "Epoch 5/5\n",
  3087. "291/291 [==============================] - 0s 1ms/step - loss: 0.4181 - val_loss: 0.4303\n",
  3088. "73/73 [==============================] - 0s 786us/step - loss: 0.4351\n",
  3089. "Epoch 1/5\n",
  3090. "291/291 [==============================] - 0s 1ms/step - loss: 0.7041 - val_loss: 0.5026\n",
  3091. "Epoch 2/5\n",
  3092. "291/291 [==============================] - 0s 1ms/step - loss: 0.4537 - val_loss: 0.4562\n",
  3093. "Epoch 3/5\n",
  3094. "291/291 [==============================] - 0s 1ms/step - loss: 0.4266 - val_loss: 0.5293\n",
  3095. "Epoch 4/5\n",
  3096. "291/291 [==============================] - 0s 1ms/step - loss: 0.4232 - val_loss: 0.4389\n",
  3097. "Epoch 5/5\n",
  3098. "291/291 [==============================] - 0s 1ms/step - loss: 0.3985 - val_loss: 0.4347\n",
  3099. "73/73 [==============================] - 0s 845us/step - loss: 0.4432\n",
  3100. "Epoch 1/5\n",
  3101. "291/291 [==============================] - 0s 1ms/step - loss: 4.4321 - val_loss: 4.0923\n",
  3102. "Epoch 2/5\n",
  3103. "291/291 [==============================] - 0s 1ms/step - loss: 3.5260 - val_loss: 3.2200\n",
  3104. "Epoch 3/5\n",
  3105. "291/291 [==============================] - 0s 1ms/step - loss: 2.7483 - val_loss: 2.4906\n",
  3106. "Epoch 4/5\n",
  3107. "291/291 [==============================] - 0s 1ms/step - loss: 2.1210 - val_loss: 1.9278\n",
  3108. "Epoch 5/5\n",
  3109. "291/291 [==============================] - 0s 1ms/step - loss: 1.6664 - val_loss: 1.5521\n",
  3110. "73/73 [==============================] - 0s 781us/step - loss: 1.6628\n",
  3111. "Epoch 1/5\n",
  3112. "291/291 [==============================] - 0s 1ms/step - loss: 5.2358 - val_loss: 4.9881\n",
  3113. "Epoch 2/5\n",
  3114. "291/291 [==============================] - 0s 1ms/step - loss: 4.3008 - val_loss: 4.1395\n",
  3115. "Epoch 3/5\n",
  3116. "291/291 [==============================] - 0s 1ms/step - loss: 3.5633 - val_loss: 3.4276\n",
  3117. "Epoch 4/5\n",
  3118. "291/291 [==============================] - 0s 1ms/step - loss: 2.9315 - val_loss: 2.8350\n",
  3119. "Epoch 5/5\n",
  3120. "291/291 [==============================] - 0s 2ms/step - loss: 2.4018 - val_loss: 2.3065\n",
  3121. "73/73 [==============================] - 0s 818us/step - loss: 2.2094\n",
  3122. "Epoch 1/5\n",
  3123. "291/291 [==============================] - 0s 2ms/step - loss: 4.2980 - val_loss: 3.7346\n",
  3124. "Epoch 2/5\n",
  3125. "291/291 [==============================] - 0s 1ms/step - loss: 3.0664 - val_loss: 2.7511\n",
  3126. "Epoch 3/5\n",
  3127. "291/291 [==============================] - 0s 1ms/step - loss: 2.3307 - val_loss: 2.1660\n",
  3128. "Epoch 4/5\n",
  3129. "291/291 [==============================] - 0s 1ms/step - loss: 1.8981 - val_loss: 1.8161\n",
  3130. "Epoch 5/5\n",
  3131. "291/291 [==============================] - 0s 1ms/step - loss: 1.6396 - val_loss: 1.6028\n",
  3132. "73/73 [==============================] - 0s 773us/step - loss: 1.5083\n",
  3133. "Epoch 1/5\n",
  3134. "291/291 [==============================] - 0s 1ms/step - loss: 5.8935 - val_loss: 5.1293\n",
  3135. "Epoch 2/5\n",
  3136. "291/291 [==============================] - 0s 1ms/step - loss: 4.2542 - val_loss: 3.7806\n",
  3137. "Epoch 3/5\n",
  3138. "291/291 [==============================] - 0s 1ms/step - loss: 3.1549 - val_loss: 2.8201\n",
  3139. "Epoch 4/5\n",
  3140. "291/291 [==============================] - 0s 1ms/step - loss: 2.3597 - val_loss: 2.1546\n",
  3141. "Epoch 5/5\n",
  3142. "291/291 [==============================] - 0s 1ms/step - loss: 1.8199 - val_loss: 1.7272\n",
  3143. "73/73 [==============================] - 0s 818us/step - loss: 1.7360\n",
  3144. "Epoch 1/5\n",
  3145. "291/291 [==============================] - 0s 1ms/step - loss: 5.1310 - val_loss: 4.2771\n",
  3146. "Epoch 2/5\n",
  3147. "291/291 [==============================] - 0s 1ms/step - loss: 3.3557 - val_loss: 2.9178\n",
  3148. "Epoch 3/5\n",
  3149. "291/291 [==============================] - 0s 1ms/step - loss: 2.3694 - val_loss: 2.1440\n",
  3150. "Epoch 4/5\n",
  3151. "291/291 [==============================] - 0s 1ms/step - loss: 1.8242 - val_loss: 1.7344\n",
  3152. "Epoch 5/5\n",
  3153. "291/291 [==============================] - 0s 1ms/step - loss: 1.5293 - val_loss: 1.5075\n",
  3154. "73/73 [==============================] - 0s 775us/step - loss: 1.3935\n",
  3155. "Epoch 1/5\n",
  3156. "291/291 [==============================] - 0s 1ms/step - loss: 5.2488 - val_loss: 5.0350\n",
  3157. "Epoch 2/5\n",
  3158. "291/291 [==============================] - 0s 1ms/step - loss: 4.5001 - val_loss: 4.3541\n",
  3159. "Epoch 3/5\n",
  3160. "291/291 [==============================] - 0s 1ms/step - loss: 3.9170 - val_loss: 3.8163\n",
  3161. "Epoch 4/5\n",
  3162. "291/291 [==============================] - 0s 1ms/step - loss: 3.4524 - val_loss: 3.3839\n",
  3163. "Epoch 5/5\n",
  3164. "291/291 [==============================] - 0s 1ms/step - loss: 3.0762 - val_loss: 3.0316\n",
  3165. "73/73 [==============================] - 0s 771us/step - loss: 2.7984\n",
  3166. "Epoch 1/5\n",
  3167. "291/291 [==============================] - 0s 1ms/step - loss: 5.0998 - val_loss: 5.0259\n",
  3168. "Epoch 2/5\n",
  3169. "291/291 [==============================] - 0s 1ms/step - loss: 4.4961 - val_loss: 4.4406\n",
  3170. "Epoch 3/5\n",
  3171. "291/291 [==============================] - 0s 1ms/step - loss: 3.9670 - val_loss: 3.9189\n",
  3172. "Epoch 4/5\n",
  3173. "291/291 [==============================] - 0s 1ms/step - loss: 3.4932 - val_loss: 3.4478\n",
  3174. "Epoch 5/5\n",
  3175. "291/291 [==============================] - 0s 1ms/step - loss: 3.0657 - val_loss: 3.0211\n",
  3176. "73/73 [==============================] - 0s 752us/step - loss: 2.9453\n",
  3177. "Epoch 1/5\n",
  3178. "291/291 [==============================] - 0s 1ms/step - loss: 5.5333 - val_loss: 5.3007\n",
  3179. "Epoch 2/5\n",
  3180. "291/291 [==============================] - 0s 1ms/step - loss: 4.6258 - val_loss: 4.4978\n",
  3181. "Epoch 3/5\n",
  3182. "291/291 [==============================] - 0s 1ms/step - loss: 3.9411 - val_loss: 3.8734\n",
  3183. "Epoch 4/5\n",
  3184. "291/291 [==============================] - 0s 1ms/step - loss: 3.3986 - val_loss: 3.3748\n",
  3185. "Epoch 5/5\n",
  3186. "291/291 [==============================] - 0s 1ms/step - loss: 2.9716 - val_loss: 2.9850\n",
  3187. "73/73 [==============================] - 0s 778us/step - loss: 2.8356\n",
  3188. "Epoch 1/5\n",
  3189. "291/291 [==============================] - 0s 1ms/step - loss: 5.6609 - val_loss: 5.4999\n",
  3190. "Epoch 2/5\n",
  3191. "291/291 [==============================] - 0s 1ms/step - loss: 5.0125 - val_loss: 4.9175\n",
  3192. "Epoch 3/5\n",
  3193. "291/291 [==============================] - 0s 1ms/step - loss: 4.4936 - val_loss: 4.4305\n",
  3194. "Epoch 4/5\n",
  3195. "291/291 [==============================] - 0s 1ms/step - loss: 4.0459 - val_loss: 3.9985\n",
  3196. "Epoch 5/5\n",
  3197. "291/291 [==============================] - 0s 1ms/step - loss: 3.6400 - val_loss: 3.6020\n",
  3198. "73/73 [==============================] - 0s 768us/step - loss: 3.3525\n",
  3199. "Epoch 1/5\n",
  3200. "291/291 [==============================] - 0s 1ms/step - loss: 4.5816 - val_loss: 4.5968\n",
  3201. "Epoch 2/5\n",
  3202. "291/291 [==============================] - 0s 1ms/step - loss: 4.1501 - val_loss: 4.1635\n",
  3203. "Epoch 3/5\n",
  3204. "291/291 [==============================] - 0s 1ms/step - loss: 3.7429 - val_loss: 3.7533\n",
  3205. "Epoch 4/5\n",
  3206. "291/291 [==============================] - 0s 1ms/step - loss: 3.3596 - val_loss: 3.3686\n",
  3207. "Epoch 5/5\n",
  3208. "291/291 [==============================] - 0s 1ms/step - loss: 3.0028 - val_loss: 3.0107\n",
  3209. "73/73 [==============================] - 0s 768us/step - loss: 2.9379\n",
  3210. "Epoch 1/5\n",
  3211. "291/291 [==============================] - 0s 1ms/step - loss: 2.1063 - val_loss: 1.3336\n",
  3212. "Epoch 2/5\n",
  3213. "291/291 [==============================] - 0s 1ms/step - loss: 1.0773 - val_loss: 1.0099\n",
  3214. "Epoch 3/5\n",
  3215. "291/291 [==============================] - 0s 1ms/step - loss: 0.8682 - val_loss: 0.8611\n",
  3216. "Epoch 4/5\n",
  3217. "291/291 [==============================] - 0s 1ms/step - loss: 0.7651 - val_loss: 0.7760\n",
  3218. "Epoch 5/5\n",
  3219. "291/291 [==============================] - 0s 1ms/step - loss: 0.7061 - val_loss: 0.7249\n",
  3220. "73/73 [==============================] - 0s 810us/step - loss: 0.6433\n",
  3221. "Epoch 1/5\n"
  3222. ]
  3223. },
  3224. {
  3225. "name": "stdout",
  3226. "output_type": "stream",
  3227. "text": [
  3228. "291/291 [==============================] - 0s 1ms/step - loss: 2.6956 - val_loss: 1.2067\n",
  3229. "Epoch 2/5\n",
  3230. "291/291 [==============================] - 0s 1ms/step - loss: 0.9746 - val_loss: 0.8711\n",
  3231. "Epoch 3/5\n",
  3232. "291/291 [==============================] - 0s 1ms/step - loss: 0.7953 - val_loss: 0.7796\n",
  3233. "Epoch 4/5\n",
  3234. "291/291 [==============================] - 1s 2ms/step - loss: 0.7263 - val_loss: 0.7387\n",
  3235. "Epoch 5/5\n",
  3236. "291/291 [==============================] - 0s 1ms/step - loss: 0.6883 - val_loss: 0.7117\n",
  3237. "73/73 [==============================] - 0s 823us/step - loss: 0.6568\n",
  3238. "Epoch 1/5\n",
  3239. "291/291 [==============================] - 0s 1ms/step - loss: 1.9465 - val_loss: 0.9631\n",
  3240. "Epoch 2/5\n",
  3241. "291/291 [==============================] - 0s 1ms/step - loss: 0.7787 - val_loss: 0.7944\n",
  3242. "Epoch 3/5\n",
  3243. "291/291 [==============================] - 0s 1ms/step - loss: 0.6959 - val_loss: 0.7343\n",
  3244. "Epoch 4/5\n",
  3245. "291/291 [==============================] - 0s 1ms/step - loss: 0.6613 - val_loss: 0.6995\n",
  3246. "Epoch 5/5\n",
  3247. "291/291 [==============================] - 0s 1ms/step - loss: 0.6373 - val_loss: 0.6716\n",
  3248. "73/73 [==============================] - 0s 767us/step - loss: 0.6202\n",
  3249. "Epoch 1/5\n",
  3250. "291/291 [==============================] - 0s 1ms/step - loss: 2.1517 - val_loss: 1.5213\n",
  3251. "Epoch 2/5\n",
  3252. "291/291 [==============================] - 0s 1ms/step - loss: 1.1584 - val_loss: 1.1266\n",
  3253. "Epoch 3/5\n",
  3254. "291/291 [==============================] - 0s 1ms/step - loss: 0.9213 - val_loss: 0.9497\n",
  3255. "Epoch 4/5\n",
  3256. "291/291 [==============================] - 0s 1ms/step - loss: 0.8054 - val_loss: 0.8517\n",
  3257. "Epoch 5/5\n",
  3258. "291/291 [==============================] - 0s 1ms/step - loss: 0.7365 - val_loss: 0.7903\n",
  3259. "73/73 [==============================] - 0s 759us/step - loss: 0.7672\n",
  3260. "Epoch 1/5\n",
  3261. "291/291 [==============================] - 0s 1ms/step - loss: 2.1020 - val_loss: 1.1096\n",
  3262. "Epoch 2/5\n",
  3263. "291/291 [==============================] - 0s 1ms/step - loss: 0.9260 - val_loss: 0.8256\n",
  3264. "Epoch 3/5\n",
  3265. "291/291 [==============================] - 0s 1ms/step - loss: 0.7213 - val_loss: 0.7451\n",
  3266. "Epoch 4/5\n",
  3267. "291/291 [==============================] - 0s 1ms/step - loss: 0.6662 - val_loss: 0.7047\n",
  3268. "Epoch 5/5\n",
  3269. "291/291 [==============================] - 0s 1ms/step - loss: 0.6375 - val_loss: 0.6834\n",
  3270. "73/73 [==============================] - 0s 818us/step - loss: 0.6531\n",
  3271. "Epoch 1/5\n",
  3272. "291/291 [==============================] - 0s 1ms/step - loss: 1.2426 - val_loss: 0.7222\n",
  3273. "Epoch 2/5\n",
  3274. "291/291 [==============================] - 0s 1ms/step - loss: 0.6078 - val_loss: 0.6015\n",
  3275. "Epoch 3/5\n",
  3276. "291/291 [==============================] - 0s 1ms/step - loss: 0.5371 - val_loss: 0.5525\n",
  3277. "Epoch 4/5\n",
  3278. "291/291 [==============================] - 0s 1ms/step - loss: 0.4924 - val_loss: 0.5019\n",
  3279. "Epoch 5/5\n",
  3280. "291/291 [==============================] - 0s 1ms/step - loss: 0.4585 - val_loss: 0.4687\n",
  3281. "73/73 [==============================] - 0s 827us/step - loss: 0.4070\n",
  3282. "Epoch 1/5\n",
  3283. "291/291 [==============================] - 0s 1ms/step - loss: 1.1237 - val_loss: 0.6358\n",
  3284. "Epoch 2/5\n",
  3285. "291/291 [==============================] - 0s 1ms/step - loss: 0.5595 - val_loss: 0.5574\n",
  3286. "Epoch 3/5\n",
  3287. "291/291 [==============================] - 0s 1ms/step - loss: 0.4957 - val_loss: 0.5184\n",
  3288. "Epoch 4/5\n",
  3289. "291/291 [==============================] - 0s 1ms/step - loss: 0.4621 - val_loss: 0.4803\n",
  3290. "Epoch 5/5\n",
  3291. "291/291 [==============================] - 0s 1ms/step - loss: 0.4386 - val_loss: 0.4552\n",
  3292. "73/73 [==============================] - 0s 785us/step - loss: 0.4386\n",
  3293. "Epoch 1/5\n",
  3294. "291/291 [==============================] - 0s 1ms/step - loss: 1.2753 - val_loss: 0.6914\n",
  3295. "Epoch 2/5\n",
  3296. "291/291 [==============================] - 0s 1ms/step - loss: 0.5911 - val_loss: 0.5660\n",
  3297. "Epoch 3/5\n",
  3298. "291/291 [==============================] - 0s 1ms/step - loss: 0.5103 - val_loss: 0.4981\n",
  3299. "Epoch 4/5\n",
  3300. "291/291 [==============================] - 0s 1ms/step - loss: 0.4599 - val_loss: 0.4571\n",
  3301. "Epoch 5/5\n",
  3302. "291/291 [==============================] - 0s 1ms/step - loss: 0.4301 - val_loss: 0.4516\n",
  3303. "73/73 [==============================] - 0s 848us/step - loss: 0.4486\n",
  3304. "Epoch 1/5\n",
  3305. "291/291 [==============================] - 0s 2ms/step - loss: 1.6190 - val_loss: 0.8127\n",
  3306. "Epoch 2/5\n",
  3307. "291/291 [==============================] - 0s 1ms/step - loss: 0.6327 - val_loss: 0.6364\n",
  3308. "Epoch 3/5\n",
  3309. "291/291 [==============================] - 0s 1ms/step - loss: 0.5491 - val_loss: 0.5778\n",
  3310. "Epoch 4/5\n",
  3311. "291/291 [==============================] - 0s 1ms/step - loss: 0.5029 - val_loss: 0.5331\n",
  3312. "Epoch 5/5\n",
  3313. "291/291 [==============================] - 0s 1ms/step - loss: 0.4679 - val_loss: 0.4893\n",
  3314. "73/73 [==============================] - 0s 955us/step - loss: 0.4766\n",
  3315. "Epoch 1/5\n",
  3316. "291/291 [==============================] - 0s 2ms/step - loss: 1.0793 - val_loss: 0.7095\n",
  3317. "Epoch 2/5\n",
  3318. "291/291 [==============================] - 0s 1ms/step - loss: 0.6015 - val_loss: 0.6126\n",
  3319. "Epoch 3/5\n",
  3320. "291/291 [==============================] - 0s 1ms/step - loss: 0.5305 - val_loss: 0.5503\n",
  3321. "Epoch 4/5\n",
  3322. "291/291 [==============================] - 0s 1ms/step - loss: 0.4877 - val_loss: 0.5056\n",
  3323. "Epoch 5/5\n",
  3324. "291/291 [==============================] - 0s 1ms/step - loss: 0.4545 - val_loss: 0.4873\n",
  3325. "73/73 [==============================] - 0s 781us/step - loss: 0.4746\n",
  3326. "Epoch 1/5\n",
  3327. "291/291 [==============================] - 0s 1ms/step - loss: 1.3182 - val_loss: 0.8138\n",
  3328. "Epoch 2/5\n",
  3329. "291/291 [==============================] - 0s 1ms/step - loss: 1.2858 - val_loss: 0.6439\n",
  3330. "Epoch 3/5\n",
  3331. "291/291 [==============================] - 0s 1ms/step - loss: 0.7415 - val_loss: 0.4850\n",
  3332. "Epoch 4/5\n",
  3333. "291/291 [==============================] - 0s 1ms/step - loss: 0.4420 - val_loss: 0.4239\n",
  3334. "Epoch 5/5\n",
  3335. "291/291 [==============================] - 0s 1ms/step - loss: 0.3981 - val_loss: 0.3875\n",
  3336. "73/73 [==============================] - 0s 782us/step - loss: 0.3638\n",
  3337. "Epoch 1/5\n",
  3338. "291/291 [==============================] - 0s 1ms/step - loss: 0.8665 - val_loss: 0.5844\n",
  3339. "Epoch 2/5\n",
  3340. "291/291 [==============================] - 0s 1ms/step - loss: 0.4757 - val_loss: 0.4769\n",
  3341. "Epoch 3/5\n",
  3342. "291/291 [==============================] - 0s 1ms/step - loss: 0.4328 - val_loss: 0.4539\n",
  3343. "Epoch 4/5\n",
  3344. "291/291 [==============================] - 0s 1ms/step - loss: 0.4186 - val_loss: 0.4185\n",
  3345. "Epoch 5/5\n",
  3346. "291/291 [==============================] - 0s 1ms/step - loss: 0.4038 - val_loss: 0.4857\n",
  3347. "73/73 [==============================] - 0s 786us/step - loss: 0.4802\n",
  3348. "Epoch 1/5\n",
  3349. "291/291 [==============================] - 0s 1ms/step - loss: 0.9001 - val_loss: 0.5798\n",
  3350. "Epoch 2/5\n",
  3351. "291/291 [==============================] - 0s 1ms/step - loss: 0.5173 - val_loss: 0.4661\n",
  3352. "Epoch 3/5\n",
  3353. "291/291 [==============================] - 0s 1ms/step - loss: 0.4239 - val_loss: 0.4287\n",
  3354. "Epoch 4/5\n",
  3355. "291/291 [==============================] - 0s 1ms/step - loss: 0.4030 - val_loss: 0.4171\n",
  3356. "Epoch 5/5\n",
  3357. "291/291 [==============================] - 0s 1ms/step - loss: 0.3875 - val_loss: 0.3909\n",
  3358. "73/73 [==============================] - 0s 792us/step - loss: 0.3829\n",
  3359. "Epoch 1/5\n",
  3360. "291/291 [==============================] - 0s 1ms/step - loss: 0.8028 - val_loss: 0.5519\n",
  3361. "Epoch 2/5\n",
  3362. "291/291 [==============================] - 0s 1ms/step - loss: 0.4774 - val_loss: 0.4744\n",
  3363. "Epoch 3/5\n",
  3364. "291/291 [==============================] - 0s 1ms/step - loss: 0.4325 - val_loss: 0.4432\n",
  3365. "Epoch 4/5\n",
  3366. "291/291 [==============================] - 0s 1ms/step - loss: 0.4083 - val_loss: 0.4202\n",
  3367. "Epoch 5/5\n",
  3368. "291/291 [==============================] - 0s 1ms/step - loss: 0.3933 - val_loss: 0.4209\n",
  3369. "73/73 [==============================] - 0s 796us/step - loss: 0.4104\n",
  3370. "Epoch 1/5\n",
  3371. "291/291 [==============================] - 0s 1ms/step - loss: 0.8573 - val_loss: 0.5585\n",
  3372. "Epoch 2/5\n",
  3373. "291/291 [==============================] - 0s 1ms/step - loss: 0.4738 - val_loss: 0.4443\n",
  3374. "Epoch 3/5\n",
  3375. "291/291 [==============================] - 0s 1ms/step - loss: 0.4111 - val_loss: 0.4215\n",
  3376. "Epoch 4/5\n",
  3377. "291/291 [==============================] - 0s 1ms/step - loss: 0.3901 - val_loss: 0.4426\n",
  3378. "Epoch 5/5\n",
  3379. "291/291 [==============================] - 0s 1ms/step - loss: 0.3777 - val_loss: 0.3919\n",
  3380. "73/73 [==============================] - 0s 798us/step - loss: 0.3917\n",
  3381. "Epoch 1/5\n",
  3382. "291/291 [==============================] - 0s 1ms/step - loss: 4.1141 - val_loss: 3.5747\n",
  3383. "Epoch 2/5\n",
  3384. "291/291 [==============================] - 0s 1ms/step - loss: 2.8839 - val_loss: 2.5261\n",
  3385. "Epoch 3/5\n",
  3386. "291/291 [==============================] - 0s 1ms/step - loss: 2.0667 - val_loss: 1.8818\n"
  3387. ]
  3388. },
  3389. {
  3390. "name": "stdout",
  3391. "output_type": "stream",
  3392. "text": [
  3393. "Epoch 4/5\n",
  3394. "291/291 [==============================] - 0s 1ms/step - loss: 1.5968 - val_loss: 1.5333\n",
  3395. "Epoch 5/5\n",
  3396. "291/291 [==============================] - 0s 1ms/step - loss: 1.3491 - val_loss: 1.3440\n",
  3397. "73/73 [==============================] - 0s 769us/step - loss: 1.3081\n",
  3398. "Epoch 1/5\n",
  3399. "291/291 [==============================] - 0s 1ms/step - loss: 5.8441 - val_loss: 4.7844\n",
  3400. "Epoch 2/5\n",
  3401. "291/291 [==============================] - 0s 1ms/step - loss: 3.7908 - val_loss: 3.3553\n",
  3402. "Epoch 3/5\n",
  3403. "291/291 [==============================] - 0s 1ms/step - loss: 2.7841 - val_loss: 2.5822\n",
  3404. "Epoch 4/5\n",
  3405. "291/291 [==============================] - 1s 2ms/step - loss: 2.2166 - val_loss: 2.1028\n",
  3406. "Epoch 5/5\n",
  3407. "291/291 [==============================] - 0s 1ms/step - loss: 1.8254 - val_loss: 1.7699\n",
  3408. "73/73 [==============================] - 0s 817us/step - loss: 1.6087\n",
  3409. "Epoch 1/5\n",
  3410. "291/291 [==============================] - 0s 1ms/step - loss: 5.8614 - val_loss: 5.4655\n",
  3411. "Epoch 2/5\n",
  3412. "291/291 [==============================] - 0s 1ms/step - loss: 4.6531 - val_loss: 4.3750\n",
  3413. "Epoch 3/5\n",
  3414. "291/291 [==============================] - 0s 1ms/step - loss: 3.7168 - val_loss: 3.4835\n",
  3415. "Epoch 4/5\n",
  3416. "291/291 [==============================] - 0s 1ms/step - loss: 2.9447 - val_loss: 2.7421\n",
  3417. "Epoch 5/5\n",
  3418. "291/291 [==============================] - 0s 1ms/step - loss: 2.3265 - val_loss: 2.1625\n",
  3419. "73/73 [==============================] - 0s 853us/step - loss: 2.0609\n",
  3420. "Epoch 1/5\n",
  3421. "291/291 [==============================] - 0s 2ms/step - loss: 4.4010 - val_loss: 3.7329\n",
  3422. "Epoch 2/5\n",
  3423. "291/291 [==============================] - 0s 1ms/step - loss: 2.9500 - val_loss: 2.5677\n",
  3424. "Epoch 3/5\n",
  3425. "291/291 [==============================] - 0s 1ms/step - loss: 2.0370 - val_loss: 1.8859\n",
  3426. "Epoch 4/5\n",
  3427. "291/291 [==============================] - 0s 1ms/step - loss: 1.5229 - val_loss: 1.5248\n",
  3428. "Epoch 5/5\n",
  3429. "291/291 [==============================] - 0s 1ms/step - loss: 1.2568 - val_loss: 1.3283\n",
  3430. "73/73 [==============================] - 0s 855us/step - loss: 1.4225\n",
  3431. "Epoch 1/5\n",
  3432. "291/291 [==============================] - 0s 2ms/step - loss: 4.9055 - val_loss: 4.2814\n",
  3433. "Epoch 2/5\n",
  3434. "291/291 [==============================] - 0s 1ms/step - loss: 3.4277 - val_loss: 3.0351\n",
  3435. "Epoch 3/5\n",
  3436. "291/291 [==============================] - 0s 1ms/step - loss: 2.4407 - val_loss: 2.1992\n",
  3437. "Epoch 4/5\n",
  3438. "291/291 [==============================] - 0s 1ms/step - loss: 1.8022 - val_loss: 1.6755\n",
  3439. "Epoch 5/5\n",
  3440. "291/291 [==============================] - 0s 1ms/step - loss: 1.4239 - val_loss: 1.3782\n",
  3441. "73/73 [==============================] - 0s 880us/step - loss: 1.2844\n",
  3442. "Epoch 1/5\n",
  3443. "291/291 [==============================] - 0s 1ms/step - loss: 5.1431 - val_loss: 5.0661\n",
  3444. "Epoch 2/5\n",
  3445. "291/291 [==============================] - 0s 1ms/step - loss: 4.6194 - val_loss: 4.5511\n",
  3446. "Epoch 3/5\n",
  3447. "291/291 [==============================] - 0s 1ms/step - loss: 4.1399 - val_loss: 4.0791\n",
  3448. "Epoch 4/5\n",
  3449. "291/291 [==============================] - 0s 1ms/step - loss: 3.7000 - val_loss: 3.6475\n",
  3450. "Epoch 5/5\n",
  3451. "291/291 [==============================] - 0s 1ms/step - loss: 3.2990 - val_loss: 3.2564\n",
  3452. "73/73 [==============================] - 0s 764us/step - loss: 2.9494\n",
  3453. "Epoch 1/5\n",
  3454. "291/291 [==============================] - 0s 1ms/step - loss: 5.2728 - val_loss: 5.1928\n",
  3455. "Epoch 2/5\n",
  3456. "291/291 [==============================] - 0s 1ms/step - loss: 4.5973 - val_loss: 4.5492\n",
  3457. "Epoch 3/5\n",
  3458. "291/291 [==============================] - 0s 1ms/step - loss: 4.0175 - val_loss: 3.9949\n",
  3459. "Epoch 4/5\n",
  3460. "291/291 [==============================] - 0s 1ms/step - loss: 3.5177 - val_loss: 3.5193\n",
  3461. "Epoch 5/5\n",
  3462. "291/291 [==============================] - 0s 1ms/step - loss: 3.0899 - val_loss: 3.1145\n",
  3463. "73/73 [==============================] - 0s 874us/step - loss: 2.9492\n",
  3464. "Epoch 1/5\n",
  3465. "291/291 [==============================] - 0s 1ms/step - loss: 5.3614 - val_loss: 5.0930\n",
  3466. "Epoch 2/5\n",
  3467. "291/291 [==============================] - 0s 1ms/step - loss: 4.4551 - val_loss: 4.2498\n",
  3468. "Epoch 3/5\n",
  3469. "291/291 [==============================] - 0s 1ms/step - loss: 3.7167 - val_loss: 3.5631\n",
  3470. "Epoch 4/5\n",
  3471. "291/291 [==============================] - 0s 1ms/step - loss: 3.1198 - val_loss: 3.0094\n",
  3472. "Epoch 5/5\n",
  3473. "291/291 [==============================] - 0s 1ms/step - loss: 2.6447 - val_loss: 2.5734\n",
  3474. "73/73 [==============================] - 0s 888us/step - loss: 2.4536\n",
  3475. "Epoch 1/5\n",
  3476. "291/291 [==============================] - 0s 2ms/step - loss: 4.2307 - val_loss: 4.1582\n",
  3477. "Epoch 2/5\n",
  3478. "291/291 [==============================] - 0s 1ms/step - loss: 3.7868 - val_loss: 3.7423\n",
  3479. "Epoch 3/5\n",
  3480. "291/291 [==============================] - 0s 1ms/step - loss: 3.4009 - val_loss: 3.3811\n",
  3481. "Epoch 4/5\n",
  3482. "291/291 [==============================] - 0s 1ms/step - loss: 3.0655 - val_loss: 3.0710\n",
  3483. "Epoch 5/5\n",
  3484. "291/291 [==============================] - 0s 1ms/step - loss: 2.7788 - val_loss: 2.8097\n",
  3485. "73/73 [==============================] - 0s 828us/step - loss: 2.7479\n",
  3486. "Epoch 1/5\n",
  3487. "291/291 [==============================] - 0s 1ms/step - loss: 5.8661 - val_loss: 5.4317\n",
  3488. "Epoch 2/5\n",
  3489. "291/291 [==============================] - 0s 1ms/step - loss: 4.6532 - val_loss: 4.3866\n",
  3490. "Epoch 3/5\n",
  3491. "291/291 [==============================] - 0s 1ms/step - loss: 3.7803 - val_loss: 3.5986\n",
  3492. "Epoch 4/5\n",
  3493. "291/291 [==============================] - 0s 1ms/step - loss: 3.1205 - val_loss: 2.9956\n",
  3494. "Epoch 5/5\n",
  3495. "291/291 [==============================] - 0s 1ms/step - loss: 2.6163 - val_loss: 2.5264\n",
  3496. "73/73 [==============================] - 0s 876us/step - loss: 2.4779\n",
  3497. "Epoch 1/5\n",
  3498. "291/291 [==============================] - 0s 2ms/step - loss: 2.2261 - val_loss: 0.9583\n",
  3499. "Epoch 2/5\n",
  3500. "291/291 [==============================] - 0s 1ms/step - loss: 0.8043 - val_loss: 0.8115\n",
  3501. "Epoch 3/5\n",
  3502. "291/291 [==============================] - 0s 1ms/step - loss: 0.7319 - val_loss: 0.7635\n",
  3503. "Epoch 4/5\n",
  3504. "291/291 [==============================] - 0s 1ms/step - loss: 0.6973 - val_loss: 0.7310\n",
  3505. "Epoch 5/5\n",
  3506. "291/291 [==============================] - 0s 1ms/step - loss: 0.6703 - val_loss: 0.7025\n",
  3507. "73/73 [==============================] - 0s 816us/step - loss: 0.6313\n",
  3508. "Epoch 1/5\n",
  3509. "291/291 [==============================] - 0s 1ms/step - loss: 2.5034 - val_loss: 1.1041\n",
  3510. "Epoch 2/5\n",
  3511. "291/291 [==============================] - 0s 1ms/step - loss: 0.9015 - val_loss: 0.8057\n",
  3512. "Epoch 3/5\n",
  3513. "291/291 [==============================] - 0s 1ms/step - loss: 0.6911 - val_loss: 0.7080\n",
  3514. "Epoch 4/5\n",
  3515. "291/291 [==============================] - 0s 1ms/step - loss: 0.6269 - val_loss: 0.6614\n",
  3516. "Epoch 5/5\n",
  3517. "291/291 [==============================] - 0s 1ms/step - loss: 0.5910 - val_loss: 0.6327\n",
  3518. "73/73 [==============================] - 0s 813us/step - loss: 0.5782\n",
  3519. "Epoch 1/5\n",
  3520. "291/291 [==============================] - 0s 2ms/step - loss: 1.7573 - val_loss: 0.9562\n",
  3521. "Epoch 2/5\n",
  3522. "291/291 [==============================] - 0s 1ms/step - loss: 0.7707 - val_loss: 0.7619\n",
  3523. "Epoch 3/5\n",
  3524. "291/291 [==============================] - 0s 1ms/step - loss: 0.6746 - val_loss: 0.7049\n",
  3525. "Epoch 4/5\n",
  3526. "291/291 [==============================] - 0s 1ms/step - loss: 0.6322 - val_loss: 0.6683\n",
  3527. "Epoch 5/5\n",
  3528. "291/291 [==============================] - 0s 1ms/step - loss: 0.6008 - val_loss: 0.6372\n",
  3529. "73/73 [==============================] - 0s 819us/step - loss: 0.5852\n",
  3530. "Epoch 1/5\n",
  3531. "291/291 [==============================] - 0s 2ms/step - loss: 2.3063 - val_loss: 1.2280\n",
  3532. "Epoch 2/5\n",
  3533. "291/291 [==============================] - 0s 1ms/step - loss: 0.8922 - val_loss: 0.7676\n",
  3534. "Epoch 3/5\n",
  3535. "291/291 [==============================] - 0s 1ms/step - loss: 0.6798 - val_loss: 0.6833\n",
  3536. "Epoch 4/5\n",
  3537. "291/291 [==============================] - 0s 1ms/step - loss: 0.6241 - val_loss: 0.6524\n",
  3538. "Epoch 5/5\n",
  3539. "291/291 [==============================] - 0s 1ms/step - loss: 0.5948 - val_loss: 0.6346\n",
  3540. "73/73 [==============================] - 0s 763us/step - loss: 0.6080\n",
  3541. "Epoch 1/5\n",
  3542. "291/291 [==============================] - 0s 2ms/step - loss: 2.4504 - val_loss: 1.1537\n",
  3543. "Epoch 2/5\n",
  3544. "291/291 [==============================] - 0s 1ms/step - loss: 0.9448 - val_loss: 0.8474\n",
  3545. "Epoch 3/5\n",
  3546. "291/291 [==============================] - 0s 1ms/step - loss: 0.7427 - val_loss: 0.7520\n",
  3547. "Epoch 4/5\n",
  3548. "291/291 [==============================] - 0s 1ms/step - loss: 0.6787 - val_loss: 0.7123\n",
  3549. "Epoch 5/5\n",
  3550. "291/291 [==============================] - 0s 1ms/step - loss: 0.6458 - val_loss: 0.6847\n",
  3551. "73/73 [==============================] - 0s 830us/step - loss: 0.6783\n",
  3552. "Epoch 1/5\n"
  3553. ]
  3554. },
  3555. {
  3556. "name": "stdout",
  3557. "output_type": "stream",
  3558. "text": [
  3559. "291/291 [==============================] - 0s 2ms/step - loss: 1.0933 - val_loss: 0.6732\n",
  3560. "Epoch 2/5\n",
  3561. "291/291 [==============================] - 0s 1ms/step - loss: 0.6431 - val_loss: 0.6026\n",
  3562. "Epoch 3/5\n",
  3563. "291/291 [==============================] - 0s 1ms/step - loss: 0.5387 - val_loss: 0.5419\n",
  3564. "Epoch 4/5\n",
  3565. "291/291 [==============================] - 0s 1ms/step - loss: 0.4936 - val_loss: 0.4938\n",
  3566. "Epoch 5/5\n",
  3567. "291/291 [==============================] - 0s 1ms/step - loss: 0.4602 - val_loss: 0.4644\n",
  3568. "73/73 [==============================] - 0s 913us/step - loss: 0.4152\n",
  3569. "Epoch 1/5\n",
  3570. "291/291 [==============================] - 0s 2ms/step - loss: 1.1063 - val_loss: 0.8657\n",
  3571. "Epoch 2/5\n",
  3572. "291/291 [==============================] - 0s 1ms/step - loss: 0.5982 - val_loss: 0.5560\n",
  3573. "Epoch 3/5\n",
  3574. "291/291 [==============================] - 0s 1ms/step - loss: 0.4924 - val_loss: 0.5038\n",
  3575. "Epoch 4/5\n",
  3576. "291/291 [==============================] - 1s 3ms/step - loss: 0.4548 - val_loss: 0.4688\n",
  3577. "Epoch 5/5\n",
  3578. "291/291 [==============================] - 0s 1ms/step - loss: 0.4277 - val_loss: 0.4545\n",
  3579. "73/73 [==============================] - 0s 797us/step - loss: 0.4458\n",
  3580. "Epoch 1/5\n",
  3581. "291/291 [==============================] - 0s 1ms/step - loss: 1.5650 - val_loss: 0.7463\n",
  3582. "Epoch 2/5\n",
  3583. "291/291 [==============================] - 0s 1ms/step - loss: 0.6417 - val_loss: 0.6208\n",
  3584. "Epoch 3/5\n",
  3585. "291/291 [==============================] - 0s 1ms/step - loss: 0.5449 - val_loss: 0.5523\n",
  3586. "Epoch 4/5\n",
  3587. "291/291 [==============================] - 0s 1ms/step - loss: 0.4925 - val_loss: 0.4965\n",
  3588. "Epoch 5/5\n",
  3589. "291/291 [==============================] - 0s 1ms/step - loss: 0.4490 - val_loss: 0.4607\n",
  3590. "73/73 [==============================] - 0s 1000us/step - loss: 0.4394\n",
  3591. "Epoch 1/5\n",
  3592. "291/291 [==============================] - 0s 2ms/step - loss: 0.9929 - val_loss: 0.6093\n",
  3593. "Epoch 2/5\n",
  3594. "291/291 [==============================] - 0s 1ms/step - loss: 0.5291 - val_loss: 0.5298\n",
  3595. "Epoch 3/5\n",
  3596. "291/291 [==============================] - 0s 1ms/step - loss: 0.4567 - val_loss: 0.4904\n",
  3597. "Epoch 4/5\n",
  3598. "291/291 [==============================] - 0s 1ms/step - loss: 0.4287 - val_loss: 0.4588\n",
  3599. "Epoch 5/5\n",
  3600. "291/291 [==============================] - 0s 1ms/step - loss: 0.4139 - val_loss: 0.4450\n",
  3601. "73/73 [==============================] - 0s 748us/step - loss: 0.4505\n",
  3602. "Epoch 1/5\n",
  3603. "291/291 [==============================] - 0s 1ms/step - loss: 1.0812 - val_loss: 0.6582\n",
  3604. "Epoch 2/5\n",
  3605. "291/291 [==============================] - 0s 1ms/step - loss: 0.5624 - val_loss: 0.5579\n",
  3606. "Epoch 3/5\n",
  3607. "291/291 [==============================] - 0s 1ms/step - loss: 0.4931 - val_loss: 0.5225\n",
  3608. "Epoch 4/5\n",
  3609. "291/291 [==============================] - 0s 1ms/step - loss: 0.4539 - val_loss: 0.4722\n",
  3610. "Epoch 5/5\n",
  3611. "291/291 [==============================] - 0s 1ms/step - loss: 0.4441 - val_loss: 0.4554\n",
  3612. "73/73 [==============================] - 0s 760us/step - loss: 0.4435\n",
  3613. "Epoch 1/5\n",
  3614. "291/291 [==============================] - 0s 1ms/step - loss: 0.7786 - val_loss: 0.5408\n",
  3615. "Epoch 2/5\n",
  3616. "291/291 [==============================] - 0s 1ms/step - loss: 0.4915 - val_loss: 0.4941\n",
  3617. "Epoch 3/5\n",
  3618. "291/291 [==============================] - 0s 1ms/step - loss: 0.4460 - val_loss: 0.4519\n",
  3619. "Epoch 4/5\n",
  3620. "291/291 [==============================] - 0s 1ms/step - loss: 0.4183 - val_loss: 0.4295\n",
  3621. "Epoch 5/5\n",
  3622. "291/291 [==============================] - 0s 1ms/step - loss: 0.4007 - val_loss: 0.4090\n",
  3623. "73/73 [==============================] - 0s 798us/step - loss: 0.3579\n",
  3624. "Epoch 1/5\n",
  3625. "291/291 [==============================] - 0s 1ms/step - loss: 1.1188 - val_loss: 1.0100\n",
  3626. "Epoch 2/5\n",
  3627. "291/291 [==============================] - 0s 1ms/step - loss: 1.1253 - val_loss: 0.4988\n",
  3628. "Epoch 3/5\n",
  3629. "291/291 [==============================] - 0s 1ms/step - loss: 0.5964 - val_loss: 0.4494\n",
  3630. "Epoch 4/5\n",
  3631. "291/291 [==============================] - 0s 1ms/step - loss: 0.4318 - val_loss: 0.4240\n",
  3632. "Epoch 5/5\n",
  3633. "291/291 [==============================] - 0s 1ms/step - loss: 0.3866 - val_loss: 0.4208\n",
  3634. "73/73 [==============================] - 0s 799us/step - loss: 0.3965\n",
  3635. "Epoch 1/5\n",
  3636. "291/291 [==============================] - 0s 1ms/step - loss: 1.0682 - val_loss: 0.6272\n",
  3637. "Epoch 2/5\n",
  3638. "291/291 [==============================] - 0s 1ms/step - loss: 0.5248 - val_loss: 0.5101\n",
  3639. "Epoch 3/5\n",
  3640. "291/291 [==============================] - 0s 1ms/step - loss: 0.4620 - val_loss: 0.4543\n",
  3641. "Epoch 4/5\n",
  3642. "291/291 [==============================] - 0s 1ms/step - loss: 0.4179 - val_loss: 0.4229\n",
  3643. "Epoch 5/5\n",
  3644. "291/291 [==============================] - 0s 1ms/step - loss: 0.4049 - val_loss: 0.4211\n",
  3645. "73/73 [==============================] - 0s 785us/step - loss: 0.4030\n",
  3646. "Epoch 1/5\n",
  3647. "291/291 [==============================] - 0s 1ms/step - loss: 1.1009 - val_loss: 0.7491\n",
  3648. "Epoch 2/5\n",
  3649. "291/291 [==============================] - 0s 1ms/step - loss: 0.9054 - val_loss: 0.5879\n",
  3650. "Epoch 3/5\n",
  3651. "291/291 [==============================] - 0s 1ms/step - loss: 0.5744 - val_loss: 1.0128\n",
  3652. "Epoch 4/5\n",
  3653. "291/291 [==============================] - 0s 1ms/step - loss: 1.2589 - val_loss: 0.4474\n",
  3654. "Epoch 5/5\n",
  3655. "291/291 [==============================] - 0s 1ms/step - loss: 0.4114 - val_loss: 0.4455\n",
  3656. "73/73 [==============================] - 0s 904us/step - loss: 0.4309\n",
  3657. "Epoch 1/5\n",
  3658. "291/291 [==============================] - 0s 1ms/step - loss: 0.8623 - val_loss: 0.6990\n",
  3659. "Epoch 2/5\n",
  3660. "291/291 [==============================] - 0s 1ms/step - loss: 0.6174 - val_loss: 0.4897\n",
  3661. "Epoch 3/5\n",
  3662. "291/291 [==============================] - 0s 1ms/step - loss: 0.4349 - val_loss: 0.4553\n",
  3663. "Epoch 4/5\n",
  3664. "291/291 [==============================] - 0s 2ms/step - loss: 0.4001 - val_loss: 0.4073\n",
  3665. "Epoch 5/5\n",
  3666. "291/291 [==============================] - 0s 1ms/step - loss: 0.3823 - val_loss: 0.3919\n",
  3667. "73/73 [==============================] - 0s 874us/step - loss: 0.4008\n",
  3668. "Epoch 1/5\n",
  3669. "291/291 [==============================] - 0s 2ms/step - loss: 6.3013 - val_loss: 5.7589\n",
  3670. "Epoch 2/5\n",
  3671. "291/291 [==============================] - 0s 1ms/step - loss: 5.1249 - val_loss: 4.9927\n",
  3672. "Epoch 3/5\n",
  3673. "291/291 [==============================] - 0s 1ms/step - loss: 4.4889 - val_loss: 4.4043\n",
  3674. "Epoch 4/5\n",
  3675. "291/291 [==============================] - 0s 1ms/step - loss: 3.9310 - val_loss: 3.8428\n",
  3676. "Epoch 5/5\n",
  3677. "291/291 [==============================] - 0s 1ms/step - loss: 3.3990 - val_loss: 3.3286\n",
  3678. "73/73 [==============================] - 0s 872us/step - loss: 3.0450\n",
  3679. "Epoch 1/5\n",
  3680. "291/291 [==============================] - 0s 2ms/step - loss: 5.6495 - val_loss: 5.5200\n",
  3681. "Epoch 2/5\n",
  3682. "291/291 [==============================] - 0s 1ms/step - loss: 4.9507 - val_loss: 4.9279\n",
  3683. "Epoch 3/5\n",
  3684. "291/291 [==============================] - 0s 1ms/step - loss: 4.4357 - val_loss: 4.4535\n",
  3685. "Epoch 4/5\n",
  3686. "291/291 [==============================] - 0s 1ms/step - loss: 4.0094 - val_loss: 4.0501\n",
  3687. "Epoch 5/5\n",
  3688. "291/291 [==============================] - 0s 1ms/step - loss: 3.6432 - val_loss: 3.6985\n",
  3689. "73/73 [==============================] - 0s 793us/step - loss: 3.5603\n",
  3690. "Epoch 1/5\n",
  3691. "291/291 [==============================] - 0s 2ms/step - loss: 5.3373 - val_loss: 5.1403\n",
  3692. "Epoch 2/5\n",
  3693. "291/291 [==============================] - 0s 1ms/step - loss: 4.4719 - val_loss: 4.3037\n",
  3694. "Epoch 3/5\n",
  3695. "291/291 [==============================] - 0s 1ms/step - loss: 3.6983 - val_loss: 3.5647\n",
  3696. "Epoch 4/5\n",
  3697. "291/291 [==============================] - 0s 1ms/step - loss: 3.0572 - val_loss: 2.9824\n",
  3698. "Epoch 5/5\n",
  3699. "291/291 [==============================] - 0s 1ms/step - loss: 2.5636 - val_loss: 2.5391\n",
  3700. "73/73 [==============================] - 0s 895us/step - loss: 2.4208\n",
  3701. "Epoch 1/5\n",
  3702. "291/291 [==============================] - 0s 2ms/step - loss: 6.4834 - val_loss: 5.6793\n",
  3703. "Epoch 2/5\n",
  3704. "291/291 [==============================] - 0s 1ms/step - loss: 4.8349 - val_loss: 4.5092\n",
  3705. "Epoch 3/5\n",
  3706. "291/291 [==============================] - 0s 1ms/step - loss: 3.9290 - val_loss: 3.7470\n",
  3707. "Epoch 4/5\n",
  3708. "291/291 [==============================] - 0s 1ms/step - loss: 3.2870 - val_loss: 3.1649\n",
  3709. "Epoch 5/5\n",
  3710. "291/291 [==============================] - 0s 1ms/step - loss: 2.7816 - val_loss: 2.6934\n",
  3711. "73/73 [==============================] - 0s 844us/step - loss: 2.4276\n",
  3712. "Epoch 1/5\n",
  3713. "291/291 [==============================] - 0s 1ms/step - loss: 5.3311 - val_loss: 5.3507\n",
  3714. "Epoch 2/5\n",
  3715. "291/291 [==============================] - 0s 1ms/step - loss: 4.8511 - val_loss: 4.9022\n",
  3716. "Epoch 3/5\n",
  3717. "291/291 [==============================] - 0s 1ms/step - loss: 4.4468 - val_loss: 4.5162\n"
  3718. ]
  3719. },
  3720. {
  3721. "name": "stdout",
  3722. "output_type": "stream",
  3723. "text": [
  3724. "Epoch 4/5\n",
  3725. "291/291 [==============================] - 0s 1ms/step - loss: 4.0957 - val_loss: 4.1770\n",
  3726. "Epoch 5/5\n",
  3727. "291/291 [==============================] - 0s 1ms/step - loss: 3.7860 - val_loss: 3.8764\n",
  3728. "73/73 [==============================] - 0s 821us/step - loss: 3.7849\n",
  3729. "Epoch 1/5\n",
  3730. "291/291 [==============================] - 0s 2ms/step - loss: 5.3911 - val_loss: 5.3873\n",
  3731. "Epoch 2/5\n",
  3732. "291/291 [==============================] - 0s 2ms/step - loss: 4.9861 - val_loss: 4.9873\n",
  3733. "Epoch 3/5\n",
  3734. "291/291 [==============================] - 0s 1ms/step - loss: 4.6055 - val_loss: 4.6125\n",
  3735. "Epoch 4/5\n",
  3736. "291/291 [==============================] - 0s 1ms/step - loss: 4.2501 - val_loss: 4.2620\n",
  3737. "Epoch 5/5\n",
  3738. "291/291 [==============================] - 0s 1ms/step - loss: 3.9193 - val_loss: 3.9388\n",
  3739. "73/73 [==============================] - 0s 732us/step - loss: 3.6123\n",
  3740. "Epoch 1/5\n",
  3741. "291/291 [==============================] - 1s 3ms/step - loss: 5.5293 - val_loss: 5.5240\n",
  3742. "Epoch 2/5\n",
  3743. "291/291 [==============================] - 0s 1ms/step - loss: 5.0536 - val_loss: 5.1001\n",
  3744. "Epoch 3/5\n",
  3745. "291/291 [==============================] - 0s 1ms/step - loss: 4.6797 - val_loss: 4.7466\n",
  3746. "Epoch 4/5\n",
  3747. "291/291 [==============================] - 0s 1ms/step - loss: 4.3571 - val_loss: 4.4317\n",
  3748. "Epoch 5/5\n",
  3749. "291/291 [==============================] - 0s 1ms/step - loss: 4.0642 - val_loss: 4.1392\n",
  3750. "73/73 [==============================] - 0s 888us/step - loss: 4.0325\n",
  3751. "Epoch 1/5\n",
  3752. "291/291 [==============================] - 0s 1ms/step - loss: 5.1841 - val_loss: 5.1848\n",
  3753. "Epoch 2/5\n",
  3754. "291/291 [==============================] - 0s 1ms/step - loss: 4.6986 - val_loss: 4.7079\n",
  3755. "Epoch 3/5\n",
  3756. "291/291 [==============================] - 0s 1ms/step - loss: 4.2596 - val_loss: 4.2800\n",
  3757. "Epoch 4/5\n",
  3758. "291/291 [==============================] - 0s 1ms/step - loss: 3.8662 - val_loss: 3.8965\n",
  3759. "Epoch 5/5\n",
  3760. "291/291 [==============================] - 0s 1ms/step - loss: 3.5141 - val_loss: 3.5534\n",
  3761. "73/73 [==============================] - 0s 960us/step - loss: 3.4113\n",
  3762. "Epoch 1/5\n",
  3763. "291/291 [==============================] - 0s 1ms/step - loss: 5.7287 - val_loss: 5.6310\n",
  3764. "Epoch 2/5\n",
  3765. "291/291 [==============================] - 0s 1ms/step - loss: 5.1762 - val_loss: 5.1137\n",
  3766. "Epoch 3/5\n",
  3767. "291/291 [==============================] - 0s 1ms/step - loss: 4.7019 - val_loss: 4.6683\n",
  3768. "Epoch 4/5\n",
  3769. "291/291 [==============================] - 0s 1ms/step - loss: 4.2917 - val_loss: 4.2787\n",
  3770. "Epoch 5/5\n",
  3771. "291/291 [==============================] - 0s 1ms/step - loss: 3.9323 - val_loss: 3.9353\n",
  3772. "73/73 [==============================] - 0s 808us/step - loss: 3.6054\n",
  3773. "Epoch 1/5\n",
  3774. "291/291 [==============================] - 0s 2ms/step - loss: 5.5706 - val_loss: 5.6552\n",
  3775. "Epoch 2/5\n",
  3776. "291/291 [==============================] - 0s 1ms/step - loss: 5.1842 - val_loss: 5.2904\n",
  3777. "Epoch 3/5\n",
  3778. "291/291 [==============================] - 0s 1ms/step - loss: 4.8508 - val_loss: 4.9642\n",
  3779. "Epoch 4/5\n",
  3780. "291/291 [==============================] - 0s 1ms/step - loss: 4.5446 - val_loss: 4.6556\n",
  3781. "Epoch 5/5\n",
  3782. "291/291 [==============================] - 0s 1ms/step - loss: 4.2481 - val_loss: 4.3469\n",
  3783. "73/73 [==============================] - 0s 899us/step - loss: 4.2459\n",
  3784. "Epoch 1/5\n",
  3785. "291/291 [==============================] - 0s 2ms/step - loss: 2.7815 - val_loss: 1.3474\n",
  3786. "Epoch 2/5\n",
  3787. "291/291 [==============================] - 0s 1ms/step - loss: 1.1558 - val_loss: 1.0436\n",
  3788. "Epoch 3/5\n",
  3789. "291/291 [==============================] - 0s 1ms/step - loss: 0.9509 - val_loss: 0.9115\n",
  3790. "Epoch 4/5\n",
  3791. "291/291 [==============================] - 0s 1ms/step - loss: 0.8164 - val_loss: 0.8253\n",
  3792. "Epoch 5/5\n",
  3793. "291/291 [==============================] - 0s 1ms/step - loss: 0.7588 - val_loss: 0.7850\n",
  3794. "73/73 [==============================] - 0s 921us/step - loss: 0.7184\n",
  3795. "Epoch 1/5\n",
  3796. "291/291 [==============================] - 0s 1ms/step - loss: 3.7473 - val_loss: 2.0229\n",
  3797. "Epoch 2/5\n",
  3798. "291/291 [==============================] - 0s 1ms/step - loss: 1.3204 - val_loss: 1.1725\n",
  3799. "Epoch 3/5\n",
  3800. "291/291 [==============================] - 0s 1ms/step - loss: 1.0402 - val_loss: 1.0226\n",
  3801. "Epoch 4/5\n",
  3802. "291/291 [==============================] - 0s 1ms/step - loss: 0.9047 - val_loss: 0.8877\n",
  3803. "Epoch 5/5\n",
  3804. "291/291 [==============================] - 0s 1ms/step - loss: 0.7979 - val_loss: 0.8043\n",
  3805. "73/73 [==============================] - 0s 788us/step - loss: 0.7757\n",
  3806. "Epoch 1/5\n",
  3807. "291/291 [==============================] - 0s 1ms/step - loss: 2.2130 - val_loss: 1.2914\n",
  3808. "Epoch 2/5\n",
  3809. "291/291 [==============================] - 0s 1ms/step - loss: 1.1067 - val_loss: 1.0467\n",
  3810. "Epoch 3/5\n",
  3811. "291/291 [==============================] - 0s 1ms/step - loss: 0.9310 - val_loss: 0.9050\n",
  3812. "Epoch 4/5\n",
  3813. "291/291 [==============================] - 0s 1ms/step - loss: 0.8237 - val_loss: 0.8164\n",
  3814. "Epoch 5/5\n",
  3815. "291/291 [==============================] - 0s 1ms/step - loss: 0.7445 - val_loss: 0.7477\n",
  3816. "73/73 [==============================] - 0s 806us/step - loss: 0.7259\n",
  3817. "Epoch 1/5\n",
  3818. "291/291 [==============================] - 0s 1ms/step - loss: 3.8627 - val_loss: 2.7832\n",
  3819. "Epoch 2/5\n",
  3820. "291/291 [==============================] - 0s 1ms/step - loss: 2.1044 - val_loss: 1.8399\n",
  3821. "Epoch 3/5\n",
  3822. "291/291 [==============================] - 0s 1ms/step - loss: 1.5691 - val_loss: 1.5350\n",
  3823. "Epoch 4/5\n",
  3824. "291/291 [==============================] - 0s 1ms/step - loss: 1.4011 - val_loss: 1.4344\n",
  3825. "Epoch 5/5\n",
  3826. "291/291 [==============================] - 0s 1ms/step - loss: 1.3485 - val_loss: 1.3994\n",
  3827. "73/73 [==============================] - 0s 1ms/step - loss: 1.2576\n",
  3828. "Epoch 1/5\n",
  3829. "291/291 [==============================] - 0s 1ms/step - loss: 2.2574 - val_loss: 1.2454\n",
  3830. "Epoch 2/5\n",
  3831. "291/291 [==============================] - 0s 1ms/step - loss: 1.0425 - val_loss: 0.9822\n",
  3832. "Epoch 3/5\n",
  3833. "291/291 [==============================] - 0s 1ms/step - loss: 0.8597 - val_loss: 0.8456\n",
  3834. "Epoch 4/5\n",
  3835. "291/291 [==============================] - 0s 1ms/step - loss: 0.7471 - val_loss: 0.7584\n",
  3836. "Epoch 5/5\n",
  3837. "291/291 [==============================] - 0s 2ms/step - loss: 0.6779 - val_loss: 0.7159\n",
  3838. "73/73 [==============================] - 0s 763us/step - loss: 0.6982\n",
  3839. "Epoch 1/5\n",
  3840. "291/291 [==============================] - 0s 1ms/step - loss: 1.6063 - val_loss: 0.9563\n",
  3841. "Epoch 2/5\n",
  3842. "291/291 [==============================] - 0s 1ms/step - loss: 0.7833 - val_loss: 0.7308\n",
  3843. "Epoch 3/5\n",
  3844. "291/291 [==============================] - 0s 1ms/step - loss: 0.6681 - val_loss: 0.6596\n",
  3845. "Epoch 4/5\n",
  3846. "291/291 [==============================] - 0s 1ms/step - loss: 0.6114 - val_loss: 0.6140\n",
  3847. "Epoch 5/5\n",
  3848. "291/291 [==============================] - 0s 1ms/step - loss: 0.5704 - val_loss: 0.5707\n",
  3849. "73/73 [==============================] - 0s 826us/step - loss: 0.5110\n",
  3850. "Epoch 1/5\n",
  3851. "291/291 [==============================] - 0s 1ms/step - loss: 1.3129 - val_loss: 0.7344\n",
  3852. "Epoch 2/5\n",
  3853. "291/291 [==============================] - 0s 1ms/step - loss: 0.6429 - val_loss: 0.6443\n",
  3854. "Epoch 3/5\n",
  3855. "291/291 [==============================] - 0s 1ms/step - loss: 0.5796 - val_loss: 0.5974\n",
  3856. "Epoch 4/5\n",
  3857. "291/291 [==============================] - 0s 1ms/step - loss: 0.5417 - val_loss: 0.5664\n",
  3858. "Epoch 5/5\n",
  3859. "291/291 [==============================] - 0s 1ms/step - loss: 0.5143 - val_loss: 0.5434\n",
  3860. "73/73 [==============================] - 0s 749us/step - loss: 0.5032\n",
  3861. "Epoch 1/5\n",
  3862. "291/291 [==============================] - 0s 1ms/step - loss: 1.6273 - val_loss: 1.1466\n",
  3863. "Epoch 2/5\n",
  3864. "291/291 [==============================] - 0s 1ms/step - loss: 0.8452 - val_loss: 0.7011\n",
  3865. "Epoch 3/5\n",
  3866. "291/291 [==============================] - 0s 1ms/step - loss: 0.6058 - val_loss: 0.5820\n",
  3867. "Epoch 4/5\n",
  3868. "291/291 [==============================] - 0s 1ms/step - loss: 0.5097 - val_loss: 0.4980\n",
  3869. "Epoch 5/5\n",
  3870. "291/291 [==============================] - 0s 1ms/step - loss: 0.4585 - val_loss: 0.4616\n",
  3871. "73/73 [==============================] - 0s 814us/step - loss: 0.4293\n",
  3872. "Epoch 1/5\n",
  3873. "291/291 [==============================] - 0s 1ms/step - loss: 1.9481 - val_loss: 1.1985\n",
  3874. "Epoch 2/5\n",
  3875. "291/291 [==============================] - 0s 1ms/step - loss: 0.9156 - val_loss: 0.7770\n",
  3876. "Epoch 3/5\n",
  3877. "291/291 [==============================] - 0s 1ms/step - loss: 0.6906 - val_loss: 0.7080\n",
  3878. "Epoch 4/5\n",
  3879. "291/291 [==============================] - 0s 1ms/step - loss: 0.6333 - val_loss: 0.6558\n",
  3880. "Epoch 5/5\n",
  3881. "291/291 [==============================] - 0s 1ms/step - loss: 0.5943 - val_loss: 0.6224\n",
  3882. "73/73 [==============================] - 0s 777us/step - loss: 0.6007\n",
  3883. "Epoch 1/5\n"
  3884. ]
  3885. },
  3886. {
  3887. "name": "stdout",
  3888. "output_type": "stream",
  3889. "text": [
  3890. "291/291 [==============================] - 0s 1ms/step - loss: 1.5283 - val_loss: 1.0011\n",
  3891. "Epoch 2/5\n",
  3892. "291/291 [==============================] - 0s 1ms/step - loss: 0.8318 - val_loss: 0.7538\n",
  3893. "Epoch 3/5\n",
  3894. "291/291 [==============================] - 0s 1ms/step - loss: 0.6750 - val_loss: 0.6752\n",
  3895. "Epoch 4/5\n",
  3896. "291/291 [==============================] - 0s 1ms/step - loss: 0.6013 - val_loss: 0.6110\n",
  3897. "Epoch 5/5\n",
  3898. "291/291 [==============================] - 0s 1ms/step - loss: 0.5507 - val_loss: 0.5656\n",
  3899. "73/73 [==============================] - 0s 833us/step - loss: 0.5598\n",
  3900. "Epoch 1/5\n",
  3901. "291/291 [==============================] - 0s 1ms/step - loss: 1.0966 - val_loss: 0.7408\n",
  3902. "Epoch 2/5\n",
  3903. "291/291 [==============================] - 0s 1ms/step - loss: 0.6507 - val_loss: 0.6474\n",
  3904. "Epoch 3/5\n",
  3905. "291/291 [==============================] - 0s 1ms/step - loss: 0.5789 - val_loss: 0.5892\n",
  3906. "Epoch 4/5\n",
  3907. "291/291 [==============================] - 0s 1ms/step - loss: 0.5334 - val_loss: 0.5373\n",
  3908. "Epoch 5/5\n",
  3909. "291/291 [==============================] - 0s 1ms/step - loss: 0.4910 - val_loss: 0.5109\n",
  3910. "73/73 [==============================] - 0s 808us/step - loss: 0.4498\n",
  3911. "Epoch 1/5\n",
  3912. "291/291 [==============================] - 0s 1ms/step - loss: 1.3416 - val_loss: 0.6886\n",
  3913. "Epoch 2/5\n",
  3914. "291/291 [==============================] - 0s 1ms/step - loss: 0.5824 - val_loss: 0.5943\n",
  3915. "Epoch 3/5\n",
  3916. "291/291 [==============================] - 0s 1ms/step - loss: 0.4919 - val_loss: 0.5130\n",
  3917. "Epoch 4/5\n",
  3918. "291/291 [==============================] - 0s 1ms/step - loss: 0.4395 - val_loss: 0.4550\n",
  3919. "Epoch 5/5\n",
  3920. "291/291 [==============================] - 0s 1ms/step - loss: 0.4179 - val_loss: 0.4339\n",
  3921. "73/73 [==============================] - 0s 1ms/step - loss: 0.4090\n",
  3922. "Epoch 1/5\n",
  3923. "291/291 [==============================] - 2s 6ms/step - loss: 0.9098 - val_loss: 0.7025\n",
  3924. "Epoch 2/5\n",
  3925. "291/291 [==============================] - 0s 2ms/step - loss: 0.5895 - val_loss: 0.5666\n",
  3926. "Epoch 3/5\n",
  3927. "291/291 [==============================] - 0s 1ms/step - loss: 0.4770 - val_loss: 0.4795\n",
  3928. "Epoch 4/5\n",
  3929. "291/291 [==============================] - 0s 1ms/step - loss: 0.4287 - val_loss: 0.5119\n",
  3930. "Epoch 5/5\n",
  3931. "291/291 [==============================] - 0s 1ms/step - loss: 0.4126 - val_loss: 0.4287\n",
  3932. "73/73 [==============================] - 0s 800us/step - loss: 0.4060\n",
  3933. "Epoch 1/5\n",
  3934. "291/291 [==============================] - 0s 1ms/step - loss: 0.8611 - val_loss: 0.6640\n",
  3935. "Epoch 2/5\n",
  3936. "291/291 [==============================] - 0s 1ms/step - loss: 0.5389 - val_loss: 4.0906\n",
  3937. "Epoch 3/5\n",
  3938. "291/291 [==============================] - 0s 2ms/step - loss: 0.4844 - val_loss: 0.4656\n",
  3939. "Epoch 4/5\n",
  3940. "291/291 [==============================] - 1s 2ms/step - loss: 0.4273 - val_loss: 0.4678\n",
  3941. "Epoch 5/5\n",
  3942. "291/291 [==============================] - 1s 2ms/step - loss: 0.4154 - val_loss: 0.4393\n",
  3943. "73/73 [==============================] - 0s 1ms/step - loss: 0.4407\n",
  3944. "Epoch 1/5\n",
  3945. "291/291 [==============================] - 1s 2ms/step - loss: 1.5652 - val_loss: 1.2856\n",
  3946. "Epoch 2/5\n",
  3947. "291/291 [==============================] - 0s 1ms/step - loss: 0.8777 - val_loss: 0.6851\n",
  3948. "Epoch 3/5\n",
  3949. "291/291 [==============================] - 0s 1ms/step - loss: 0.6127 - val_loss: 0.6236\n",
  3950. "Epoch 4/5\n",
  3951. "291/291 [==============================] - 0s 1ms/step - loss: 0.5360 - val_loss: 0.5387\n",
  3952. "Epoch 5/5\n",
  3953. "291/291 [==============================] - 0s 1ms/step - loss: 0.4947 - val_loss: 0.5119\n",
  3954. "73/73 [==============================] - 0s 883us/step - loss: 0.5119\n",
  3955. "Epoch 1/5\n",
  3956. "291/291 [==============================] - 0s 2ms/step - loss: 6.2815 - val_loss: 5.8696\n",
  3957. "Epoch 2/5\n",
  3958. "291/291 [==============================] - 0s 1ms/step - loss: 5.2052 - val_loss: 5.0919\n",
  3959. "Epoch 3/5\n",
  3960. "291/291 [==============================] - 0s 1ms/step - loss: 4.5904 - val_loss: 4.5599\n",
  3961. "Epoch 4/5\n",
  3962. "291/291 [==============================] - 0s 1ms/step - loss: 4.1326 - val_loss: 4.1366\n",
  3963. "Epoch 5/5\n",
  3964. "291/291 [==============================] - 0s 1ms/step - loss: 3.7566 - val_loss: 3.7805\n",
  3965. "73/73 [==============================] - 0s 809us/step - loss: 3.4556\n",
  3966. "Epoch 1/5\n",
  3967. "291/291 [==============================] - 0s 1ms/step - loss: 5.1626 - val_loss: 4.9619\n",
  3968. "Epoch 2/5\n",
  3969. "291/291 [==============================] - 0s 1ms/step - loss: 4.2879 - val_loss: 4.0761\n",
  3970. "Epoch 3/5\n",
  3971. "291/291 [==============================] - 0s 1ms/step - loss: 3.4177 - val_loss: 3.1874\n",
  3972. "Epoch 4/5\n",
  3973. "291/291 [==============================] - 0s 1ms/step - loss: 2.6382 - val_loss: 2.4648\n",
  3974. "Epoch 5/5\n",
  3975. "291/291 [==============================] - 0s 1ms/step - loss: 2.0553 - val_loss: 1.9711\n",
  3976. "73/73 [==============================] - 0s 785us/step - loss: 1.8437\n",
  3977. "Epoch 1/5\n",
  3978. "291/291 [==============================] - 0s 1ms/step - loss: 4.8162 - val_loss: 4.3934\n",
  3979. "Epoch 2/5\n",
  3980. "291/291 [==============================] - 0s 1ms/step - loss: 3.6430 - val_loss: 3.3615\n",
  3981. "Epoch 3/5\n",
  3982. "291/291 [==============================] - 0s 1ms/step - loss: 2.7633 - val_loss: 2.6008\n",
  3983. "Epoch 4/5\n",
  3984. "291/291 [==============================] - 1s 2ms/step - loss: 2.1290 - val_loss: 2.0789\n",
  3985. "Epoch 5/5\n",
  3986. "291/291 [==============================] - 1s 2ms/step - loss: 1.7092 - val_loss: 1.7428\n",
  3987. "73/73 [==============================] - 0s 855us/step - loss: 1.5776\n",
  3988. "Epoch 1/5\n",
  3989. "291/291 [==============================] - 1s 2ms/step - loss: 5.0626 - val_loss: 4.6340\n",
  3990. "Epoch 2/5\n",
  3991. "291/291 [==============================] - 0s 1ms/step - loss: 3.8428 - val_loss: 3.3945\n",
  3992. "Epoch 3/5\n",
  3993. "291/291 [==============================] - 0s 2ms/step - loss: 2.8159 - val_loss: 2.5003\n",
  3994. "Epoch 4/5\n",
  3995. "291/291 [==============================] - 0s 2ms/step - loss: 2.1188 - val_loss: 1.9421\n",
  3996. "Epoch 5/5\n",
  3997. "291/291 [==============================] - 0s 2ms/step - loss: 1.7170 - val_loss: 1.6501\n",
  3998. "73/73 [==============================] - 0s 1ms/step - loss: 1.5723\n",
  3999. "Epoch 1/5\n",
  4000. "291/291 [==============================] - 1s 2ms/step - loss: 5.6103 - val_loss: 5.4771\n",
  4001. "Epoch 2/5\n",
  4002. "291/291 [==============================] - 0s 1ms/step - loss: 4.8104 - val_loss: 4.7596\n",
  4003. "Epoch 3/5\n",
  4004. "291/291 [==============================] - 0s 1ms/step - loss: 4.1947 - val_loss: 4.1711\n",
  4005. "Epoch 4/5\n",
  4006. "291/291 [==============================] - 0s 1ms/step - loss: 3.6711 - val_loss: 3.6550\n",
  4007. "Epoch 5/5\n",
  4008. "291/291 [==============================] - 0s 2ms/step - loss: 3.2036 - val_loss: 3.1872\n",
  4009. "73/73 [==============================] - 0s 1ms/step - loss: 3.1116\n",
  4010. "Epoch 1/5\n",
  4011. "291/291 [==============================] - 1s 2ms/step - loss: 6.0842 - val_loss: 5.8855\n",
  4012. "Epoch 2/5\n",
  4013. "291/291 [==============================] - 0s 1ms/step - loss: 5.2507 - val_loss: 5.1372\n",
  4014. "Epoch 3/5\n",
  4015. "291/291 [==============================] - 0s 1ms/step - loss: 4.6145 - val_loss: 4.5471\n",
  4016. "Epoch 4/5\n",
  4017. "291/291 [==============================] - 0s 2ms/step - loss: 4.1047 - val_loss: 4.0676\n",
  4018. "Epoch 5/5\n",
  4019. "291/291 [==============================] - 0s 1ms/step - loss: 3.6863 - val_loss: 3.6688\n",
  4020. "73/73 [==============================] - 0s 1ms/step - loss: 3.3478\n",
  4021. "Epoch 1/5\n",
  4022. "291/291 [==============================] - 0s 1ms/step - loss: 5.2607 - val_loss: 5.0887\n",
  4023. "Epoch 2/5\n",
  4024. "291/291 [==============================] - 0s 1ms/step - loss: 4.5092 - val_loss: 4.3717\n",
  4025. "Epoch 3/5\n",
  4026. "291/291 [==============================] - 0s 1ms/step - loss: 3.8565 - val_loss: 3.7410\n",
  4027. "Epoch 4/5\n",
  4028. "291/291 [==============================] - 0s 1ms/step - loss: 3.2836 - val_loss: 3.1843\n",
  4029. "Epoch 5/5\n",
  4030. "291/291 [==============================] - 0s 1ms/step - loss: 2.7845 - val_loss: 2.7036\n",
  4031. "73/73 [==============================] - 0s 788us/step - loss: 2.6376\n",
  4032. "Epoch 1/5\n",
  4033. "291/291 [==============================] - 0s 1ms/step - loss: 5.2546 - val_loss: 5.3253\n",
  4034. "Epoch 2/5\n",
  4035. "291/291 [==============================] - 0s 1ms/step - loss: 4.9153 - val_loss: 4.9895\n",
  4036. "Epoch 3/5\n",
  4037. "291/291 [==============================] - 0s 1ms/step - loss: 4.5999 - val_loss: 4.6773\n",
  4038. "Epoch 4/5\n",
  4039. "291/291 [==============================] - 0s 1ms/step - loss: 4.3060 - val_loss: 4.3834\n",
  4040. "Epoch 5/5\n",
  4041. "291/291 [==============================] - 0s 1ms/step - loss: 4.0288 - val_loss: 4.1050\n",
  4042. "73/73 [==============================] - 0s 910us/step - loss: 3.9560\n",
  4043. "Epoch 1/5\n",
  4044. "291/291 [==============================] - 1s 2ms/step - loss: 5.6712 - val_loss: 5.7048\n",
  4045. "Epoch 2/5\n",
  4046. "291/291 [==============================] - 1s 2ms/step - loss: 5.3128 - val_loss: 5.3599\n",
  4047. "Epoch 3/5\n",
  4048. "291/291 [==============================] - 1s 2ms/step - loss: 4.9973 - val_loss: 5.0518\n",
  4049. "Epoch 4/5\n"
  4050. ]
  4051. },
  4052. {
  4053. "name": "stdout",
  4054. "output_type": "stream",
  4055. "text": [
  4056. "291/291 [==============================] - 1s 3ms/step - loss: 4.7127 - val_loss: 4.7711\n",
  4057. "Epoch 5/5\n",
  4058. "291/291 [==============================] - 0s 1ms/step - loss: 4.4507 - val_loss: 4.5109\n",
  4059. "73/73 [==============================] - 0s 767us/step - loss: 4.1631\n",
  4060. "Epoch 1/5\n",
  4061. "291/291 [==============================] - 0s 1ms/step - loss: 4.0911 - val_loss: 4.0414\n",
  4062. "Epoch 2/5\n",
  4063. "291/291 [==============================] - 0s 1ms/step - loss: 3.6222 - val_loss: 3.5913\n",
  4064. "Epoch 3/5\n",
  4065. "291/291 [==============================] - 0s 2ms/step - loss: 3.2316 - val_loss: 3.2131\n",
  4066. "Epoch 4/5\n",
  4067. "291/291 [==============================] - 0s 1ms/step - loss: 2.9007 - val_loss: 2.8942\n",
  4068. "Epoch 5/5\n",
  4069. "291/291 [==============================] - 0s 1ms/step - loss: 2.6237 - val_loss: 2.6286\n",
  4070. "73/73 [==============================] - 0s 902us/step - loss: 2.4640\n",
  4071. "Epoch 1/5\n",
  4072. "291/291 [==============================] - 0s 1ms/step - loss: 1.9715 - val_loss: 0.9200\n",
  4073. "Epoch 2/5\n",
  4074. "291/291 [==============================] - 0s 1ms/step - loss: 0.7871 - val_loss: 0.7300\n",
  4075. "Epoch 3/5\n",
  4076. "291/291 [==============================] - 0s 1ms/step - loss: 0.6737 - val_loss: 0.6735\n",
  4077. "Epoch 4/5\n",
  4078. "291/291 [==============================] - 0s 1ms/step - loss: 0.6296 - val_loss: 0.6350\n",
  4079. "Epoch 5/5\n",
  4080. "291/291 [==============================] - 0s 2ms/step - loss: 0.5985 - val_loss: 0.6077\n",
  4081. "73/73 [==============================] - 0s 807us/step - loss: 0.5657\n",
  4082. "Epoch 1/5\n",
  4083. "291/291 [==============================] - 0s 1ms/step - loss: 2.7332 - val_loss: 1.5698\n",
  4084. "Epoch 2/5\n",
  4085. "291/291 [==============================] - 0s 1ms/step - loss: 1.2712 - val_loss: 1.1817\n",
  4086. "Epoch 3/5\n",
  4087. "291/291 [==============================] - 0s 1ms/step - loss: 0.9797 - val_loss: 0.8973\n",
  4088. "Epoch 4/5\n",
  4089. "291/291 [==============================] - 0s 1ms/step - loss: 0.7523 - val_loss: 0.7174\n",
  4090. "Epoch 5/5\n",
  4091. "291/291 [==============================] - 0s 1ms/step - loss: 0.6471 - val_loss: 0.6661\n",
  4092. "73/73 [==============================] - 0s 816us/step - loss: 0.6276\n",
  4093. "Epoch 1/5\n",
  4094. "291/291 [==============================] - 0s 2ms/step - loss: 2.6570 - val_loss: 1.7487\n",
  4095. "Epoch 2/5\n",
  4096. "291/291 [==============================] - 0s 1ms/step - loss: 1.3777 - val_loss: 1.2991\n",
  4097. "Epoch 3/5\n",
  4098. "291/291 [==============================] - 0s 1ms/step - loss: 1.1124 - val_loss: 1.0747\n",
  4099. "Epoch 4/5\n",
  4100. "291/291 [==============================] - 0s 1ms/step - loss: 0.9391 - val_loss: 0.9125\n",
  4101. "Epoch 5/5\n",
  4102. "291/291 [==============================] - 0s 1ms/step - loss: 0.8143 - val_loss: 0.8078\n",
  4103. "73/73 [==============================] - 0s 785us/step - loss: 0.7731\n",
  4104. "Epoch 1/5\n",
  4105. "291/291 [==============================] - 0s 1ms/step - loss: 2.7348 - val_loss: 1.4765\n",
  4106. "Epoch 2/5\n",
  4107. "291/291 [==============================] - 0s 1ms/step - loss: 1.1625 - val_loss: 1.1437\n",
  4108. "Epoch 3/5\n",
  4109. "291/291 [==============================] - 0s 1ms/step - loss: 0.9672 - val_loss: 0.9821\n",
  4110. "Epoch 4/5\n",
  4111. "291/291 [==============================] - 0s 1ms/step - loss: 0.8518 - val_loss: 0.8776\n",
  4112. "Epoch 5/5\n",
  4113. "291/291 [==============================] - 0s 1ms/step - loss: 0.7743 - val_loss: 0.8102\n",
  4114. "73/73 [==============================] - 0s 801us/step - loss: 0.7489\n",
  4115. "Epoch 1/5\n",
  4116. "291/291 [==============================] - 0s 1ms/step - loss: 2.3529 - val_loss: 1.3884\n",
  4117. "Epoch 2/5\n",
  4118. "291/291 [==============================] - 0s 1ms/step - loss: 1.1067 - val_loss: 0.9939\n",
  4119. "Epoch 3/5\n",
  4120. "291/291 [==============================] - 0s 1ms/step - loss: 0.9236 - val_loss: 0.8744\n",
  4121. "Epoch 4/5\n",
  4122. "291/291 [==============================] - 0s 1ms/step - loss: 0.8257 - val_loss: 0.8059\n",
  4123. "Epoch 5/5\n",
  4124. "291/291 [==============================] - 0s 1ms/step - loss: 0.7538 - val_loss: 0.7608\n",
  4125. "73/73 [==============================] - 0s 764us/step - loss: 0.7438\n",
  4126. "Epoch 1/5\n",
  4127. "291/291 [==============================] - 0s 1ms/step - loss: 1.1032 - val_loss: 0.6575\n",
  4128. "Epoch 2/5\n",
  4129. "291/291 [==============================] - 0s 1ms/step - loss: 0.5835 - val_loss: 0.5613\n",
  4130. "Epoch 3/5\n",
  4131. "291/291 [==============================] - 0s 1ms/step - loss: 0.5118 - val_loss: 0.5108\n",
  4132. "Epoch 4/5\n",
  4133. "291/291 [==============================] - 0s 1ms/step - loss: 0.4767 - val_loss: 0.4801\n",
  4134. "Epoch 5/5\n",
  4135. "291/291 [==============================] - 0s 1ms/step - loss: 0.4571 - val_loss: 0.4639\n",
  4136. "73/73 [==============================] - 0s 815us/step - loss: 0.4166\n",
  4137. "Epoch 1/5\n",
  4138. "291/291 [==============================] - 0s 1ms/step - loss: 1.3559 - val_loss: 0.8263\n",
  4139. "Epoch 2/5\n",
  4140. "291/291 [==============================] - 0s 1ms/step - loss: 0.7389 - val_loss: 0.7271\n",
  4141. "Epoch 3/5\n",
  4142. "291/291 [==============================] - 0s 1ms/step - loss: 0.6579 - val_loss: 0.6601\n",
  4143. "Epoch 4/5\n",
  4144. "291/291 [==============================] - 0s 1ms/step - loss: 0.5941 - val_loss: 0.6058\n",
  4145. "Epoch 5/5\n",
  4146. "291/291 [==============================] - 0s 1ms/step - loss: 0.5478 - val_loss: 0.5684\n",
  4147. "73/73 [==============================] - 0s 811us/step - loss: 0.5247\n",
  4148. "Epoch 1/5\n",
  4149. "291/291 [==============================] - 0s 1ms/step - loss: 1.6783 - val_loss: 1.2288\n",
  4150. "Epoch 2/5\n",
  4151. "291/291 [==============================] - 0s 1ms/step - loss: 0.9129 - val_loss: 0.7413\n",
  4152. "Epoch 3/5\n",
  4153. "291/291 [==============================] - 0s 1ms/step - loss: 0.6655 - val_loss: 0.6729\n",
  4154. "Epoch 4/5\n",
  4155. "291/291 [==============================] - 0s 1ms/step - loss: 0.6131 - val_loss: 0.6273\n",
  4156. "Epoch 5/5\n",
  4157. "291/291 [==============================] - 0s 1ms/step - loss: 0.5714 - val_loss: 0.5821\n",
  4158. "73/73 [==============================] - 0s 803us/step - loss: 0.5535\n",
  4159. "Epoch 1/5\n",
  4160. "291/291 [==============================] - 0s 1ms/step - loss: 1.2879 - val_loss: 0.8117\n",
  4161. "Epoch 2/5\n",
  4162. "291/291 [==============================] - 0s 1ms/step - loss: 0.6771 - val_loss: 0.6789\n",
  4163. "Epoch 3/5\n",
  4164. "291/291 [==============================] - 0s 1ms/step - loss: 0.5949 - val_loss: 0.6125\n",
  4165. "Epoch 4/5\n",
  4166. "291/291 [==============================] - 0s 1ms/step - loss: 0.5496 - val_loss: 0.5674\n",
  4167. "Epoch 5/5\n",
  4168. "291/291 [==============================] - 0s 1ms/step - loss: 0.5103 - val_loss: 0.5310\n",
  4169. "73/73 [==============================] - 0s 742us/step - loss: 0.5254\n",
  4170. "Epoch 1/5\n",
  4171. "291/291 [==============================] - 0s 1ms/step - loss: 0.8752 - val_loss: 0.5577\n",
  4172. "Epoch 2/5\n",
  4173. "291/291 [==============================] - 0s 1ms/step - loss: 0.4914 - val_loss: 0.5044\n",
  4174. "Epoch 3/5\n",
  4175. "291/291 [==============================] - 0s 1ms/step - loss: 0.4559 - val_loss: 0.4679\n",
  4176. "Epoch 4/5\n",
  4177. "291/291 [==============================] - 0s 1ms/step - loss: 0.4321 - val_loss: 0.4639\n",
  4178. "Epoch 5/5\n",
  4179. "291/291 [==============================] - 0s 1ms/step - loss: 0.4174 - val_loss: 0.4495\n",
  4180. "73/73 [==============================] - 0s 761us/step - loss: 0.4507\n",
  4181. "Epoch 1/5\n",
  4182. "291/291 [==============================] - 0s 1ms/step - loss: 0.9477 - val_loss: 0.6377\n",
  4183. "Epoch 2/5\n",
  4184. "291/291 [==============================] - 0s 1ms/step - loss: 0.5449 - val_loss: 0.5340\n",
  4185. "Epoch 3/5\n",
  4186. "291/291 [==============================] - 0s 1ms/step - loss: 0.4696 - val_loss: 0.4722\n",
  4187. "Epoch 4/5\n",
  4188. "291/291 [==============================] - 0s 1ms/step - loss: 0.4608 - val_loss: 0.5270\n",
  4189. "Epoch 5/5\n",
  4190. "291/291 [==============================] - 0s 1ms/step - loss: 0.4435 - val_loss: 0.4458\n",
  4191. "73/73 [==============================] - 0s 809us/step - loss: 0.4035\n",
  4192. "Epoch 1/5\n",
  4193. "291/291 [==============================] - 0s 1ms/step - loss: 0.7668 - val_loss: 0.5660\n",
  4194. "Epoch 2/5\n",
  4195. "291/291 [==============================] - 0s 1ms/step - loss: 0.4922 - val_loss: 0.5190\n",
  4196. "Epoch 3/5\n",
  4197. "291/291 [==============================] - 0s 1ms/step - loss: 0.4928 - val_loss: 0.4735\n",
  4198. "Epoch 4/5\n",
  4199. "291/291 [==============================] - 0s 1ms/step - loss: 0.4284 - val_loss: 0.4385\n",
  4200. "Epoch 5/5\n",
  4201. "291/291 [==============================] - 0s 1ms/step - loss: 0.4066 - val_loss: 0.4424\n",
  4202. "73/73 [==============================] - 0s 784us/step - loss: 0.4197\n",
  4203. "Epoch 1/5\n",
  4204. "291/291 [==============================] - 0s 1ms/step - loss: 0.9284 - val_loss: 0.5801\n",
  4205. "Epoch 2/5\n",
  4206. "291/291 [==============================] - 0s 1ms/step - loss: 0.5115 - val_loss: 0.5188\n",
  4207. "Epoch 3/5\n",
  4208. "291/291 [==============================] - 0s 1ms/step - loss: 0.4733 - val_loss: 0.4776\n",
  4209. "Epoch 4/5\n",
  4210. "291/291 [==============================] - 0s 1ms/step - loss: 0.4373 - val_loss: 0.4539\n",
  4211. "Epoch 5/5\n",
  4212. "291/291 [==============================] - 0s 1ms/step - loss: 0.4194 - val_loss: 0.4276\n",
  4213. "73/73 [==============================] - 0s 730us/step - loss: 0.4150\n",
  4214. "Epoch 1/5\n"
  4215. ]
  4216. },
  4217. {
  4218. "name": "stdout",
  4219. "output_type": "stream",
  4220. "text": [
  4221. "291/291 [==============================] - 0s 1ms/step - loss: 1.0626 - val_loss: 0.5508\n",
  4222. "Epoch 2/5\n",
  4223. "291/291 [==============================] - 0s 1ms/step - loss: 0.4793 - val_loss: 0.4605\n",
  4224. "Epoch 3/5\n",
  4225. "291/291 [==============================] - 0s 1ms/step - loss: 0.4268 - val_loss: 0.4409\n",
  4226. "Epoch 4/5\n",
  4227. "291/291 [==============================] - 0s 1ms/step - loss: 0.4026 - val_loss: 0.4066\n",
  4228. "Epoch 5/5\n",
  4229. "291/291 [==============================] - 0s 1ms/step - loss: 0.3897 - val_loss: 0.4155\n",
  4230. "73/73 [==============================] - 0s 748us/step - loss: 0.4110\n",
  4231. "Epoch 1/5\n",
  4232. "291/291 [==============================] - 0s 1ms/step - loss: 1.2784 - val_loss: 0.7612\n",
  4233. "Epoch 2/5\n",
  4234. "291/291 [==============================] - 0s 1ms/step - loss: 0.5864 - val_loss: 0.5595\n",
  4235. "Epoch 3/5\n",
  4236. "291/291 [==============================] - 0s 1ms/step - loss: 0.4915 - val_loss: 0.5030\n",
  4237. "Epoch 4/5\n",
  4238. "291/291 [==============================] - 0s 1ms/step - loss: 0.4468 - val_loss: 0.4685\n",
  4239. "Epoch 5/5\n",
  4240. "291/291 [==============================] - 0s 1ms/step - loss: 0.4165 - val_loss: 0.4279\n",
  4241. "73/73 [==============================] - 0s 782us/step - loss: 0.4290\n",
  4242. "Epoch 1/5\n",
  4243. "291/291 [==============================] - 0s 1ms/step - loss: 4.9371 - val_loss: 4.6894\n",
  4244. "Epoch 2/5\n",
  4245. "291/291 [==============================] - 0s 1ms/step - loss: 4.0600 - val_loss: 3.7815\n",
  4246. "Epoch 3/5\n",
  4247. "291/291 [==============================] - 0s 1ms/step - loss: 3.2163 - val_loss: 2.9661\n",
  4248. "Epoch 4/5\n",
  4249. "291/291 [==============================] - 0s 1ms/step - loss: 2.4913 - val_loss: 2.2945\n",
  4250. "Epoch 5/5\n",
  4251. "291/291 [==============================] - 0s 1ms/step - loss: 1.9238 - val_loss: 1.8019\n",
  4252. "73/73 [==============================] - 0s 744us/step - loss: 1.6316\n",
  4253. "Epoch 1/5\n",
  4254. "291/291 [==============================] - 0s 1ms/step - loss: 5.8375 - val_loss: 5.6362\n",
  4255. "Epoch 2/5\n",
  4256. "291/291 [==============================] - 0s 1ms/step - loss: 5.0092 - val_loss: 4.9306\n",
  4257. "Epoch 3/5\n",
  4258. "291/291 [==============================] - 0s 1ms/step - loss: 4.3940 - val_loss: 4.3509\n",
  4259. "Epoch 4/5\n",
  4260. "291/291 [==============================] - 0s 1ms/step - loss: 3.8743 - val_loss: 3.8614\n",
  4261. "Epoch 5/5\n",
  4262. "291/291 [==============================] - 0s 1ms/step - loss: 3.4377 - val_loss: 3.4476\n",
  4263. "73/73 [==============================] - 0s 862us/step - loss: 3.3255\n",
  4264. "Epoch 1/5\n",
  4265. "291/291 [==============================] - 0s 1ms/step - loss: 4.9759 - val_loss: 4.7916\n",
  4266. "Epoch 2/5\n",
  4267. "291/291 [==============================] - 0s 1ms/step - loss: 4.1281 - val_loss: 3.9506\n",
  4268. "Epoch 3/5\n",
  4269. "291/291 [==============================] - 0s 1ms/step - loss: 3.3538 - val_loss: 3.1669\n",
  4270. "Epoch 4/5\n",
  4271. "291/291 [==============================] - 0s 1ms/step - loss: 2.6259 - val_loss: 2.4511\n",
  4272. "Epoch 5/5\n",
  4273. "291/291 [==============================] - 0s 1ms/step - loss: 2.0386 - val_loss: 1.9271\n",
  4274. "73/73 [==============================] - 0s 903us/step - loss: 1.7830\n",
  4275. "Epoch 1/5\n",
  4276. "291/291 [==============================] - 0s 1ms/step - loss: 4.4325 - val_loss: 3.8219\n",
  4277. "Epoch 2/5\n",
  4278. "291/291 [==============================] - 0s 1ms/step - loss: 3.1094 - val_loss: 2.7308\n",
  4279. "Epoch 3/5\n",
  4280. "291/291 [==============================] - 0s 1ms/step - loss: 2.2000 - val_loss: 2.0279\n",
  4281. "Epoch 4/5\n",
  4282. "291/291 [==============================] - 0s 1ms/step - loss: 1.6315 - val_loss: 1.6520\n",
  4283. "Epoch 5/5\n",
  4284. "291/291 [==============================] - 0s 1ms/step - loss: 1.3275 - val_loss: 1.4663\n",
  4285. "73/73 [==============================] - 0s 793us/step - loss: 1.6378\n",
  4286. "Epoch 1/5\n",
  4287. "291/291 [==============================] - 0s 1ms/step - loss: 4.0869 - val_loss: 3.7319\n",
  4288. "Epoch 2/5\n",
  4289. "291/291 [==============================] - 0s 1ms/step - loss: 3.1193 - val_loss: 2.8999\n",
  4290. "Epoch 3/5\n",
  4291. "291/291 [==============================] - 0s 1ms/step - loss: 2.4533 - val_loss: 2.3178\n",
  4292. "Epoch 4/5\n",
  4293. "291/291 [==============================] - 0s 1ms/step - loss: 2.0165 - val_loss: 1.9623\n",
  4294. "Epoch 5/5\n",
  4295. "291/291 [==============================] - 0s 1ms/step - loss: 1.7559 - val_loss: 1.7491\n",
  4296. "73/73 [==============================] - 0s 806us/step - loss: 1.6231\n",
  4297. "Epoch 1/5\n",
  4298. "291/291 [==============================] - 0s 2ms/step - loss: 5.1664 - val_loss: 5.1030\n",
  4299. "Epoch 2/5\n",
  4300. "291/291 [==============================] - 0s 1ms/step - loss: 4.6720 - val_loss: 4.6178\n",
  4301. "Epoch 3/5\n",
  4302. "291/291 [==============================] - 0s 1ms/step - loss: 4.2207 - val_loss: 4.1746\n",
  4303. "Epoch 4/5\n",
  4304. "291/291 [==============================] - 0s 1ms/step - loss: 3.8074 - val_loss: 3.7682\n",
  4305. "Epoch 5/5\n",
  4306. "291/291 [==============================] - 0s 1ms/step - loss: 3.4290 - val_loss: 3.3985\n",
  4307. "73/73 [==============================] - 0s 795us/step - loss: 3.1035\n",
  4308. "Epoch 1/5\n",
  4309. "291/291 [==============================] - 0s 1ms/step - loss: 6.0166 - val_loss: 5.5802\n",
  4310. "Epoch 2/5\n",
  4311. "291/291 [==============================] - 0s 1ms/step - loss: 4.7776 - val_loss: 4.5192\n",
  4312. "Epoch 3/5\n",
  4313. "291/291 [==============================] - 0s 1ms/step - loss: 3.9017 - val_loss: 3.7250\n",
  4314. "Epoch 4/5\n",
  4315. "291/291 [==============================] - 0s 1ms/step - loss: 3.2220 - val_loss: 3.0839\n",
  4316. "Epoch 5/5\n",
  4317. "291/291 [==============================] - 0s 1ms/step - loss: 2.6756 - val_loss: 2.5711\n",
  4318. "73/73 [==============================] - 0s 768us/step - loss: 2.4867\n",
  4319. "Epoch 1/5\n",
  4320. "291/291 [==============================] - 0s 1ms/step - loss: 5.4434 - val_loss: 5.2485\n",
  4321. "Epoch 2/5\n",
  4322. "291/291 [==============================] - 0s 1ms/step - loss: 4.5123 - val_loss: 4.3123\n",
  4323. "Epoch 3/5\n",
  4324. "291/291 [==============================] - 0s 1ms/step - loss: 3.6942 - val_loss: 3.5131\n",
  4325. "Epoch 4/5\n",
  4326. "291/291 [==============================] - 0s 1ms/step - loss: 3.0165 - val_loss: 2.8602\n",
  4327. "Epoch 5/5\n",
  4328. "291/291 [==============================] - 0s 1ms/step - loss: 2.4742 - val_loss: 2.3453\n",
  4329. "73/73 [==============================] - 0s 760us/step - loss: 2.2334\n",
  4330. "Epoch 1/5\n",
  4331. "291/291 [==============================] - 0s 1ms/step - loss: 5.3227 - val_loss: 5.1978\n",
  4332. "Epoch 2/5\n",
  4333. "291/291 [==============================] - 0s 1ms/step - loss: 4.7438 - val_loss: 4.6406\n",
  4334. "Epoch 3/5\n",
  4335. "291/291 [==============================] - 0s 1ms/step - loss: 4.2253 - val_loss: 4.1376\n",
  4336. "Epoch 4/5\n",
  4337. "291/291 [==============================] - 0s 1ms/step - loss: 3.7565 - val_loss: 3.6787\n",
  4338. "Epoch 5/5\n",
  4339. "291/291 [==============================] - 0s 1ms/step - loss: 3.3281 - val_loss: 3.2587\n",
  4340. "73/73 [==============================] - 0s 757us/step - loss: 3.0075\n",
  4341. "Epoch 1/5\n",
  4342. "291/291 [==============================] - 0s 1ms/step - loss: 5.6666 - val_loss: 5.7201\n",
  4343. "Epoch 2/5\n",
  4344. "291/291 [==============================] - 0s 1ms/step - loss: 5.2248 - val_loss: 5.3056\n",
  4345. "Epoch 3/5\n",
  4346. "291/291 [==============================] - 0s 1ms/step - loss: 4.8551 - val_loss: 4.9493\n",
  4347. "Epoch 4/5\n",
  4348. "291/291 [==============================] - 0s 1ms/step - loss: 4.5315 - val_loss: 4.6310\n",
  4349. "Epoch 5/5\n",
  4350. "291/291 [==============================] - 0s 1ms/step - loss: 4.2401 - val_loss: 4.3405\n",
  4351. "73/73 [==============================] - 0s 793us/step - loss: 4.2490\n",
  4352. "Epoch 1/5\n",
  4353. "291/291 [==============================] - 0s 1ms/step - loss: 2.7997 - val_loss: 1.6501\n",
  4354. "Epoch 2/5\n",
  4355. "291/291 [==============================] - 0s 1ms/step - loss: 1.3973 - val_loss: 1.3409\n",
  4356. "Epoch 3/5\n",
  4357. "291/291 [==============================] - 0s 1ms/step - loss: 1.1599 - val_loss: 1.0921\n",
  4358. "Epoch 4/5\n",
  4359. "291/291 [==============================] - 0s 1ms/step - loss: 0.9079 - val_loss: 0.8049\n",
  4360. "Epoch 5/5\n",
  4361. "291/291 [==============================] - 0s 1ms/step - loss: 0.6927 - val_loss: 0.6544\n",
  4362. "73/73 [==============================] - 0s 803us/step - loss: 0.6152\n",
  4363. "Epoch 1/5\n",
  4364. "291/291 [==============================] - 0s 1ms/step - loss: 2.6487 - val_loss: 1.3596\n",
  4365. "Epoch 2/5\n",
  4366. "291/291 [==============================] - 0s 1ms/step - loss: 1.0703 - val_loss: 0.9311\n",
  4367. "Epoch 3/5\n",
  4368. "291/291 [==============================] - 0s 1ms/step - loss: 0.7962 - val_loss: 0.7387\n",
  4369. "Epoch 4/5\n",
  4370. "291/291 [==============================] - 0s 1ms/step - loss: 0.6686 - val_loss: 0.6662\n",
  4371. "Epoch 5/5\n",
  4372. "291/291 [==============================] - 0s 1ms/step - loss: 0.6169 - val_loss: 0.6439\n",
  4373. "73/73 [==============================] - 0s 735us/step - loss: 0.5928\n",
  4374. "Epoch 1/5\n",
  4375. "291/291 [==============================] - 0s 1ms/step - loss: 2.2719 - val_loss: 1.2279\n",
  4376. "Epoch 2/5\n",
  4377. "291/291 [==============================] - 0s 1ms/step - loss: 1.0016 - val_loss: 0.8730\n",
  4378. "Epoch 3/5\n",
  4379. "291/291 [==============================] - 0s 1ms/step - loss: 0.7864 - val_loss: 0.7710\n"
  4380. ]
  4381. },
  4382. {
  4383. "name": "stdout",
  4384. "output_type": "stream",
  4385. "text": [
  4386. "Epoch 4/5\n",
  4387. "291/291 [==============================] - 0s 1ms/step - loss: 0.7165 - val_loss: 0.7285\n",
  4388. "Epoch 5/5\n",
  4389. "291/291 [==============================] - 0s 1ms/step - loss: 0.6785 - val_loss: 0.6995\n",
  4390. "73/73 [==============================] - 0s 733us/step - loss: 0.6663\n",
  4391. "Epoch 1/5\n",
  4392. "291/291 [==============================] - 0s 1ms/step - loss: 2.2156 - val_loss: 1.4413\n",
  4393. "Epoch 2/5\n",
  4394. "291/291 [==============================] - 0s 1ms/step - loss: 1.0867 - val_loss: 0.8911\n",
  4395. "Epoch 3/5\n",
  4396. "291/291 [==============================] - 0s 1ms/step - loss: 0.7237 - val_loss: 0.6792\n",
  4397. "Epoch 4/5\n",
  4398. "291/291 [==============================] - 0s 1ms/step - loss: 0.6057 - val_loss: 0.6232\n",
  4399. "Epoch 5/5\n",
  4400. "291/291 [==============================] - 0s 1ms/step - loss: 0.5600 - val_loss: 0.5879\n",
  4401. "73/73 [==============================] - 0s 749us/step - loss: 0.5722\n",
  4402. "Epoch 1/5\n",
  4403. "291/291 [==============================] - 0s 1ms/step - loss: 2.6005 - val_loss: 1.2742\n",
  4404. "Epoch 2/5\n",
  4405. "291/291 [==============================] - 0s 1ms/step - loss: 1.0069 - val_loss: 0.9386\n",
  4406. "Epoch 3/5\n",
  4407. "291/291 [==============================] - 0s 1ms/step - loss: 0.8379 - val_loss: 0.8460\n",
  4408. "Epoch 4/5\n",
  4409. "291/291 [==============================] - 0s 1ms/step - loss: 0.7713 - val_loss: 0.7948\n",
  4410. "Epoch 5/5\n",
  4411. "291/291 [==============================] - 0s 1ms/step - loss: 0.7299 - val_loss: 0.7609\n",
  4412. "73/73 [==============================] - 0s 799us/step - loss: 0.7603\n",
  4413. "Epoch 1/5\n",
  4414. "291/291 [==============================] - 0s 1ms/step - loss: 1.1858 - val_loss: 0.7471\n",
  4415. "Epoch 2/5\n",
  4416. "291/291 [==============================] - 0s 1ms/step - loss: 0.6326 - val_loss: 0.6372\n",
  4417. "Epoch 3/5\n",
  4418. "291/291 [==============================] - 0s 1ms/step - loss: 0.5705 - val_loss: 0.5735\n",
  4419. "Epoch 4/5\n",
  4420. "291/291 [==============================] - 0s 1ms/step - loss: 0.5218 - val_loss: 0.5324\n",
  4421. "Epoch 5/5\n",
  4422. "291/291 [==============================] - 0s 1ms/step - loss: 0.4867 - val_loss: 0.4955\n",
  4423. "73/73 [==============================] - 0s 810us/step - loss: 0.4433\n",
  4424. "Epoch 1/5\n",
  4425. "291/291 [==============================] - 0s 1ms/step - loss: 1.2401 - val_loss: 0.6763\n",
  4426. "Epoch 2/5\n",
  4427. "291/291 [==============================] - 0s 1ms/step - loss: 0.5773 - val_loss: 0.5786\n",
  4428. "Epoch 3/5\n",
  4429. "291/291 [==============================] - 0s 1ms/step - loss: 0.5061 - val_loss: 0.5134\n",
  4430. "Epoch 4/5\n",
  4431. "291/291 [==============================] - 0s 1ms/step - loss: 0.4800 - val_loss: 0.4791\n",
  4432. "Epoch 5/5\n",
  4433. "291/291 [==============================] - 0s 1ms/step - loss: 0.4337 - val_loss: 0.4505\n",
  4434. "73/73 [==============================] - 0s 829us/step - loss: 0.4331\n",
  4435. "Epoch 1/5\n",
  4436. "291/291 [==============================] - 0s 1ms/step - loss: 1.0934 - val_loss: 0.6417\n",
  4437. "Epoch 2/5\n",
  4438. "291/291 [==============================] - 0s 1ms/step - loss: 0.5383 - val_loss: 0.4981\n",
  4439. "Epoch 3/5\n",
  4440. "291/291 [==============================] - 0s 1ms/step - loss: 0.4561 - val_loss: 0.4645\n",
  4441. "Epoch 4/5\n",
  4442. "291/291 [==============================] - 0s 1ms/step - loss: 0.4288 - val_loss: 0.4454\n",
  4443. "Epoch 5/5\n",
  4444. "291/291 [==============================] - 0s 1ms/step - loss: 0.4108 - val_loss: 0.4257\n",
  4445. "73/73 [==============================] - 0s 764us/step - loss: 0.4131\n",
  4446. "Epoch 1/5\n",
  4447. "291/291 [==============================] - 0s 1ms/step - loss: 1.3462 - val_loss: 0.6799\n",
  4448. "Epoch 2/5\n",
  4449. "291/291 [==============================] - 0s 1ms/step - loss: 0.5891 - val_loss: 0.6265\n",
  4450. "Epoch 3/5\n",
  4451. "291/291 [==============================] - 0s 1ms/step - loss: 0.5313 - val_loss: 0.5565\n",
  4452. "Epoch 4/5\n",
  4453. "291/291 [==============================] - 0s 1ms/step - loss: 0.4904 - val_loss: 0.5141\n",
  4454. "Epoch 5/5\n",
  4455. "291/291 [==============================] - 0s 1ms/step - loss: 0.4598 - val_loss: 0.4770\n",
  4456. "73/73 [==============================] - 0s 730us/step - loss: 0.4786\n",
  4457. "Epoch 1/5\n",
  4458. "291/291 [==============================] - 0s 1ms/step - loss: 1.0841 - val_loss: 0.6720\n",
  4459. "Epoch 2/5\n",
  4460. "291/291 [==============================] - 0s 1ms/step - loss: 0.5812 - val_loss: 0.5870\n",
  4461. "Epoch 3/5\n",
  4462. "291/291 [==============================] - 0s 1ms/step - loss: 0.5172 - val_loss: 0.5331\n",
  4463. "Epoch 4/5\n",
  4464. "291/291 [==============================] - 0s 1ms/step - loss: 0.4688 - val_loss: 0.4948\n",
  4465. "Epoch 5/5\n",
  4466. "291/291 [==============================] - 0s 1ms/step - loss: 0.4430 - val_loss: 0.4565\n",
  4467. "73/73 [==============================] - 0s 822us/step - loss: 0.4480\n",
  4468. "Epoch 1/5\n",
  4469. "291/291 [==============================] - 0s 1ms/step - loss: 0.9568 - val_loss: 0.5939\n",
  4470. "Epoch 2/5\n",
  4471. "291/291 [==============================] - 0s 1ms/step - loss: 0.5135 - val_loss: 0.4955\n",
  4472. "Epoch 3/5\n",
  4473. "291/291 [==============================] - 0s 1ms/step - loss: 0.4699 - val_loss: 0.4599\n",
  4474. "Epoch 4/5\n",
  4475. "291/291 [==============================] - 0s 1ms/step - loss: 0.4333 - val_loss: 0.4295\n",
  4476. "Epoch 5/5\n",
  4477. "291/291 [==============================] - 0s 1ms/step - loss: 0.4062 - val_loss: 0.4118\n",
  4478. "73/73 [==============================] - 0s 856us/step - loss: 0.3633\n",
  4479. "Epoch 1/5\n",
  4480. "291/291 [==============================] - 0s 1ms/step - loss: 0.7367 - val_loss: 0.5581\n",
  4481. "Epoch 2/5\n",
  4482. "291/291 [==============================] - 0s 1ms/step - loss: 0.4635 - val_loss: 0.4780\n",
  4483. "Epoch 3/5\n",
  4484. "291/291 [==============================] - 0s 1ms/step - loss: 0.4197 - val_loss: 0.4290\n",
  4485. "Epoch 4/5\n",
  4486. "291/291 [==============================] - 0s 1ms/step - loss: 0.4007 - val_loss: 0.4019\n",
  4487. "Epoch 5/5\n",
  4488. "291/291 [==============================] - 0s 1ms/step - loss: 0.3878 - val_loss: 0.4003\n",
  4489. "73/73 [==============================] - 0s 771us/step - loss: 0.3965\n",
  4490. "Epoch 1/5\n",
  4491. "291/291 [==============================] - 0s 1ms/step - loss: 1.0118 - val_loss: 0.5843\n",
  4492. "Epoch 2/5\n",
  4493. "291/291 [==============================] - 0s 1ms/step - loss: 0.4891 - val_loss: 0.4795\n",
  4494. "Epoch 3/5\n",
  4495. "291/291 [==============================] - 0s 1ms/step - loss: 0.4323 - val_loss: 0.4583\n",
  4496. "Epoch 4/5\n",
  4497. "291/291 [==============================] - 0s 1ms/step - loss: 0.4018 - val_loss: 0.4223\n",
  4498. "Epoch 5/5\n",
  4499. "291/291 [==============================] - 0s 1ms/step - loss: 0.3855 - val_loss: 0.4107\n",
  4500. "73/73 [==============================] - 0s 766us/step - loss: 0.3947\n",
  4501. "Epoch 1/5\n",
  4502. "291/291 [==============================] - 0s 1ms/step - loss: 0.8421 - val_loss: 0.5637\n",
  4503. "Epoch 2/5\n",
  4504. "291/291 [==============================] - 0s 1ms/step - loss: 0.4724 - val_loss: 0.4849\n",
  4505. "Epoch 3/5\n",
  4506. "291/291 [==============================] - 0s 1ms/step - loss: 0.4231 - val_loss: 0.4400\n",
  4507. "Epoch 4/5\n",
  4508. "291/291 [==============================] - 0s 1ms/step - loss: 0.4043 - val_loss: 0.4176\n",
  4509. "Epoch 5/5\n",
  4510. "291/291 [==============================] - 0s 1ms/step - loss: 0.3900 - val_loss: 0.4057\n",
  4511. "73/73 [==============================] - 0s 770us/step - loss: 0.4130\n",
  4512. "Epoch 1/5\n",
  4513. "291/291 [==============================] - 0s 1ms/step - loss: 0.9421 - val_loss: 0.5976\n",
  4514. "Epoch 2/5\n",
  4515. "291/291 [==============================] - 0s 1ms/step - loss: 0.4795 - val_loss: 0.4711\n",
  4516. "Epoch 3/5\n",
  4517. "291/291 [==============================] - 0s 1ms/step - loss: 0.4108 - val_loss: 0.4365\n",
  4518. "Epoch 4/5\n",
  4519. "291/291 [==============================] - 0s 1ms/step - loss: 0.3857 - val_loss: 0.3944\n",
  4520. "Epoch 5/5\n",
  4521. "291/291 [==============================] - 0s 1ms/step - loss: 0.3764 - val_loss: 0.4734\n",
  4522. "73/73 [==============================] - 0s 792us/step - loss: 0.4805\n",
  4523. "Epoch 1/5\n",
  4524. "291/291 [==============================] - 0s 1ms/step - loss: 5.1226 - val_loss: 4.8427\n",
  4525. "Epoch 2/5\n",
  4526. "291/291 [==============================] - 0s 1ms/step - loss: 4.2120 - val_loss: 3.9700\n",
  4527. "Epoch 3/5\n",
  4528. "291/291 [==============================] - 0s 1ms/step - loss: 3.4064 - val_loss: 3.1738\n",
  4529. "Epoch 4/5\n",
  4530. "291/291 [==============================] - 0s 1ms/step - loss: 2.6846 - val_loss: 2.4814\n",
  4531. "Epoch 5/5\n",
  4532. "291/291 [==============================] - 0s 1ms/step - loss: 2.0833 - val_loss: 1.9258\n",
  4533. "73/73 [==============================] - 0s 772us/step - loss: 1.7926\n",
  4534. "Epoch 1/5\n",
  4535. "291/291 [==============================] - 0s 1ms/step - loss: 4.9386 - val_loss: 4.4433\n",
  4536. "Epoch 2/5\n",
  4537. "291/291 [==============================] - 0s 1ms/step - loss: 3.6643 - val_loss: 3.3120\n",
  4538. "Epoch 3/5\n",
  4539. "291/291 [==============================] - 0s 1ms/step - loss: 2.7013 - val_loss: 2.4452\n",
  4540. "Epoch 4/5\n",
  4541. "291/291 [==============================] - 0s 1ms/step - loss: 2.0273 - val_loss: 1.8993\n",
  4542. "Epoch 5/5\n",
  4543. "291/291 [==============================] - 0s 1ms/step - loss: 1.6428 - val_loss: 1.6118\n",
  4544. "73/73 [==============================] - 0s 817us/step - loss: 1.4589\n",
  4545. "Epoch 1/5\n"
  4546. ]
  4547. },
  4548. {
  4549. "name": "stdout",
  4550. "output_type": "stream",
  4551. "text": [
  4552. "291/291 [==============================] - 0s 1ms/step - loss: 4.8066 - val_loss: 4.2130\n",
  4553. "Epoch 2/5\n",
  4554. "291/291 [==============================] - 0s 1ms/step - loss: 3.3039 - val_loss: 2.8527\n",
  4555. "Epoch 3/5\n",
  4556. "291/291 [==============================] - 0s 1ms/step - loss: 2.2011 - val_loss: 1.9228\n",
  4557. "Epoch 4/5\n",
  4558. "291/291 [==============================] - 0s 1ms/step - loss: 1.5465 - val_loss: 1.4534\n",
  4559. "Epoch 5/5\n",
  4560. "291/291 [==============================] - 0s 1ms/step - loss: 1.2385 - val_loss: 1.2450\n",
  4561. "73/73 [==============================] - 0s 769us/step - loss: 1.0902\n",
  4562. "Epoch 1/5\n",
  4563. "291/291 [==============================] - 0s 1ms/step - loss: 5.1400 - val_loss: 4.8425\n",
  4564. "Epoch 2/5\n",
  4565. "291/291 [==============================] - 0s 1ms/step - loss: 4.2102 - val_loss: 3.9364\n",
  4566. "Epoch 3/5\n",
  4567. "291/291 [==============================] - 0s 1ms/step - loss: 3.3653 - val_loss: 3.1008\n",
  4568. "Epoch 4/5\n",
  4569. "291/291 [==============================] - 0s 1ms/step - loss: 2.6069 - val_loss: 2.3818\n",
  4570. "Epoch 5/5\n",
  4571. "291/291 [==============================] - 0s 1ms/step - loss: 1.9888 - val_loss: 1.8328\n",
  4572. "73/73 [==============================] - 0s 797us/step - loss: 1.6850\n",
  4573. "Epoch 1/5\n",
  4574. "291/291 [==============================] - 0s 1ms/step - loss: 4.7017 - val_loss: 4.4679\n",
  4575. "Epoch 2/5\n",
  4576. "291/291 [==============================] - 0s 1ms/step - loss: 3.8174 - val_loss: 3.5983\n",
  4577. "Epoch 3/5\n",
  4578. "291/291 [==============================] - 0s 1ms/step - loss: 3.0371 - val_loss: 2.8370\n",
  4579. "Epoch 4/5\n",
  4580. "291/291 [==============================] - 0s 1ms/step - loss: 2.3840 - val_loss: 2.2256\n",
  4581. "Epoch 5/5\n",
  4582. "291/291 [==============================] - 0s 1ms/step - loss: 1.8937 - val_loss: 1.7944\n",
  4583. "73/73 [==============================] - 0s 783us/step - loss: 1.7399\n",
  4584. "Epoch 1/5\n",
  4585. "291/291 [==============================] - 0s 1ms/step - loss: 6.4060 - val_loss: 5.8144\n",
  4586. "Epoch 2/5\n",
  4587. "291/291 [==============================] - 0s 1ms/step - loss: 4.9339 - val_loss: 4.5489\n",
  4588. "Epoch 3/5\n",
  4589. "291/291 [==============================] - 0s 1ms/step - loss: 3.9202 - val_loss: 3.6587\n",
  4590. "Epoch 4/5\n",
  4591. "291/291 [==============================] - 0s 1ms/step - loss: 3.1834 - val_loss: 2.9946\n",
  4592. "Epoch 5/5\n",
  4593. "291/291 [==============================] - 0s 1ms/step - loss: 2.6369 - val_loss: 2.5051\n",
  4594. "73/73 [==============================] - 0s 761us/step - loss: 2.4104\n",
  4595. "Epoch 1/5\n",
  4596. "291/291 [==============================] - 0s 1ms/step - loss: 5.4875 - val_loss: 5.4503\n",
  4597. "Epoch 2/5\n",
  4598. "291/291 [==============================] - 1s 2ms/step - loss: 4.9220 - val_loss: 4.9081\n",
  4599. "Epoch 3/5\n",
  4600. "291/291 [==============================] - 0s 1ms/step - loss: 4.4276 - val_loss: 4.4201\n",
  4601. "Epoch 4/5\n",
  4602. "291/291 [==============================] - 0s 1ms/step - loss: 3.9750 - val_loss: 3.9686\n",
  4603. "Epoch 5/5\n",
  4604. "291/291 [==============================] - 0s 1ms/step - loss: 3.5547 - val_loss: 3.5472\n",
  4605. "73/73 [==============================] - 0s 765us/step - loss: 3.4474\n",
  4606. "Epoch 1/5\n",
  4607. "291/291 [==============================] - 0s 1ms/step - loss: 5.6257 - val_loss: 5.6028\n",
  4608. "Epoch 2/5\n",
  4609. "291/291 [==============================] - 0s 1ms/step - loss: 5.1078 - val_loss: 5.1239\n",
  4610. "Epoch 3/5\n",
  4611. "291/291 [==============================] - 0s 1ms/step - loss: 4.6758 - val_loss: 4.7036\n",
  4612. "Epoch 4/5\n",
  4613. "291/291 [==============================] - 0s 1ms/step - loss: 4.2877 - val_loss: 4.3166\n",
  4614. "Epoch 5/5\n",
  4615. "291/291 [==============================] - 0s 1ms/step - loss: 3.9267 - val_loss: 3.9512\n",
  4616. "73/73 [==============================] - 0s 807us/step - loss: 3.8029\n",
  4617. "Epoch 1/5\n",
  4618. "291/291 [==============================] - 0s 1ms/step - loss: 5.7281 - val_loss: 5.5692\n",
  4619. "Epoch 2/5\n",
  4620. "291/291 [==============================] - 0s 1ms/step - loss: 5.0263 - val_loss: 4.8897\n",
  4621. "Epoch 3/5\n",
  4622. "291/291 [==============================] - 0s 1ms/step - loss: 4.4077 - val_loss: 4.2781\n",
  4623. "Epoch 4/5\n",
  4624. "291/291 [==============================] - 0s 1ms/step - loss: 3.8456 - val_loss: 3.7189\n",
  4625. "Epoch 5/5\n",
  4626. "291/291 [==============================] - 0s 1ms/step - loss: 3.3284 - val_loss: 3.2043\n",
  4627. "73/73 [==============================] - 0s 834us/step - loss: 2.9475\n",
  4628. "Epoch 1/5\n",
  4629. "291/291 [==============================] - 0s 1ms/step - loss: 5.5883 - val_loss: 5.4816\n",
  4630. "Epoch 2/5\n",
  4631. "291/291 [==============================] - 0s 1ms/step - loss: 4.8871 - val_loss: 4.8369\n",
  4632. "Epoch 3/5\n",
  4633. "291/291 [==============================] - 0s 1ms/step - loss: 4.3170 - val_loss: 4.2917\n",
  4634. "Epoch 4/5\n",
  4635. "291/291 [==============================] - 0s 1ms/step - loss: 3.8244 - val_loss: 3.8086\n",
  4636. "Epoch 5/5\n",
  4637. "291/291 [==============================] - 0s 1ms/step - loss: 3.3839 - val_loss: 3.3738\n",
  4638. "73/73 [==============================] - 0s 769us/step - loss: 3.3017\n",
  4639. "Epoch 1/5\n",
  4640. "291/291 [==============================] - 0s 1ms/step - loss: 2.3146 - val_loss: 1.1778\n",
  4641. "Epoch 2/5\n",
  4642. "291/291 [==============================] - 0s 1ms/step - loss: 0.8873 - val_loss: 0.7858\n",
  4643. "Epoch 3/5\n",
  4644. "291/291 [==============================] - 0s 1ms/step - loss: 0.6941 - val_loss: 0.7040\n",
  4645. "Epoch 4/5\n",
  4646. "291/291 [==============================] - 0s 1ms/step - loss: 0.6462 - val_loss: 0.6728\n",
  4647. "Epoch 5/5\n",
  4648. "291/291 [==============================] - 0s 1ms/step - loss: 0.6211 - val_loss: 0.6479\n",
  4649. "73/73 [==============================] - 0s 794us/step - loss: 0.5664\n",
  4650. "Epoch 1/5\n",
  4651. "291/291 [==============================] - 0s 1ms/step - loss: 2.5020 - val_loss: 1.0313\n",
  4652. "Epoch 2/5\n",
  4653. "291/291 [==============================] - 0s 1ms/step - loss: 0.8088 - val_loss: 0.7115\n",
  4654. "Epoch 3/5\n",
  4655. "291/291 [==============================] - 0s 1ms/step - loss: 0.6434 - val_loss: 0.6479\n",
  4656. "Epoch 4/5\n",
  4657. "291/291 [==============================] - 0s 1ms/step - loss: 0.5974 - val_loss: 0.6170\n",
  4658. "Epoch 5/5\n",
  4659. "291/291 [==============================] - 0s 1ms/step - loss: 0.5683 - val_loss: 0.5921\n",
  4660. "73/73 [==============================] - 0s 751us/step - loss: 0.5555\n",
  4661. "Epoch 1/5\n",
  4662. "291/291 [==============================] - 0s 1ms/step - loss: 2.0897 - val_loss: 0.8365\n",
  4663. "Epoch 2/5\n",
  4664. "291/291 [==============================] - 0s 1ms/step - loss: 0.6956 - val_loss: 0.6964\n",
  4665. "Epoch 3/5\n",
  4666. "291/291 [==============================] - 0s 1ms/step - loss: 0.6270 - val_loss: 0.6552\n",
  4667. "Epoch 4/5\n",
  4668. "291/291 [==============================] - 0s 1ms/step - loss: 0.5952 - val_loss: 0.6269\n",
  4669. "Epoch 5/5\n",
  4670. "291/291 [==============================] - 0s 1ms/step - loss: 0.5708 - val_loss: 0.6030\n",
  4671. "73/73 [==============================] - 0s 828us/step - loss: 0.5542\n",
  4672. "Epoch 1/5\n",
  4673. "291/291 [==============================] - 0s 1ms/step - loss: 3.1561 - val_loss: 1.2809\n",
  4674. "Epoch 2/5\n",
  4675. "291/291 [==============================] - 0s 1ms/step - loss: 1.0264 - val_loss: 0.8961\n",
  4676. "Epoch 3/5\n",
  4677. "291/291 [==============================] - 0s 1ms/step - loss: 0.7903 - val_loss: 0.7505\n",
  4678. "Epoch 4/5\n",
  4679. "291/291 [==============================] - 0s 1ms/step - loss: 0.6835 - val_loss: 0.6897\n",
  4680. "Epoch 5/5\n",
  4681. "291/291 [==============================] - 0s 1ms/step - loss: 0.6280 - val_loss: 0.6581\n",
  4682. "73/73 [==============================] - 0s 837us/step - loss: 0.6230\n",
  4683. "Epoch 1/5\n",
  4684. "291/291 [==============================] - 0s 1ms/step - loss: 1.9692 - val_loss: 0.9936\n",
  4685. "Epoch 2/5\n",
  4686. "291/291 [==============================] - 0s 1ms/step - loss: 0.7718 - val_loss: 0.7343\n",
  4687. "Epoch 3/5\n",
  4688. "291/291 [==============================] - 0s 1ms/step - loss: 0.6364 - val_loss: 0.6621\n",
  4689. "Epoch 4/5\n",
  4690. "291/291 [==============================] - 0s 1ms/step - loss: 0.5856 - val_loss: 0.6187\n",
  4691. "Epoch 5/5\n",
  4692. "291/291 [==============================] - 0s 1ms/step - loss: 0.5528 - val_loss: 0.5922\n",
  4693. "73/73 [==============================] - 0s 778us/step - loss: 0.5798\n",
  4694. "Epoch 1/5\n",
  4695. "291/291 [==============================] - 0s 1ms/step - loss: 1.0973 - val_loss: 0.6307\n",
  4696. "Epoch 2/5\n",
  4697. "291/291 [==============================] - 0s 1ms/step - loss: 0.5624 - val_loss: 0.5529\n",
  4698. "Epoch 3/5\n",
  4699. "291/291 [==============================] - 0s 1ms/step - loss: 0.5066 - val_loss: 0.5035\n",
  4700. "Epoch 4/5\n",
  4701. "291/291 [==============================] - 0s 1ms/step - loss: 0.4665 - val_loss: 0.4688\n",
  4702. "Epoch 5/5\n",
  4703. "291/291 [==============================] - 0s 1ms/step - loss: 0.4362 - val_loss: 0.4720\n",
  4704. "73/73 [==============================] - 0s 817us/step - loss: 0.4205\n",
  4705. "Epoch 1/5\n",
  4706. "291/291 [==============================] - 0s 1ms/step - loss: 1.0686 - val_loss: 0.6908\n",
  4707. "Epoch 2/5\n",
  4708. "291/291 [==============================] - 0s 1ms/step - loss: 0.5946 - val_loss: 0.5710\n",
  4709. "Epoch 3/5\n",
  4710. "291/291 [==============================] - 0s 1ms/step - loss: 0.5093 - val_loss: 0.5047\n"
  4711. ]
  4712. },
  4713. {
  4714. "name": "stdout",
  4715. "output_type": "stream",
  4716. "text": [
  4717. "Epoch 4/5\n",
  4718. "291/291 [==============================] - 0s 1ms/step - loss: 0.4572 - val_loss: 0.4585\n",
  4719. "Epoch 5/5\n",
  4720. "291/291 [==============================] - 0s 1ms/step - loss: 0.4262 - val_loss: 0.4433\n",
  4721. "73/73 [==============================] - 0s 798us/step - loss: 0.4275\n",
  4722. "Epoch 1/5\n",
  4723. "291/291 [==============================] - 0s 1ms/step - loss: 1.1227 - val_loss: 0.7027\n",
  4724. "Epoch 2/5\n",
  4725. "291/291 [==============================] - 0s 1ms/step - loss: 0.5751 - val_loss: 0.5824\n",
  4726. "Epoch 3/5\n",
  4727. "291/291 [==============================] - 0s 1ms/step - loss: 0.4987 - val_loss: 0.5099\n",
  4728. "Epoch 4/5\n",
  4729. "291/291 [==============================] - 0s 1ms/step - loss: 0.4569 - val_loss: 0.4724\n",
  4730. "Epoch 5/5\n",
  4731. "291/291 [==============================] - 0s 1ms/step - loss: 0.4282 - val_loss: 0.4401\n",
  4732. "73/73 [==============================] - 0s 779us/step - loss: 0.4202\n",
  4733. "Epoch 1/5\n",
  4734. "291/291 [==============================] - 0s 1ms/step - loss: 1.3362 - val_loss: 0.6359\n",
  4735. "Epoch 2/5\n",
  4736. "291/291 [==============================] - 0s 1ms/step - loss: 0.5624 - val_loss: 0.5943\n",
  4737. "Epoch 3/5\n",
  4738. "291/291 [==============================] - 0s 1ms/step - loss: 0.5819 - val_loss: 0.5202\n",
  4739. "Epoch 4/5\n",
  4740. "291/291 [==============================] - 0s 1ms/step - loss: 0.4860 - val_loss: 0.4772\n",
  4741. "Epoch 5/5\n",
  4742. "291/291 [==============================] - 0s 1ms/step - loss: 0.4429 - val_loss: 0.4553\n",
  4743. "73/73 [==============================] - 0s 896us/step - loss: 0.4583\n",
  4744. "Epoch 1/5\n",
  4745. "291/291 [==============================] - 0s 1ms/step - loss: 1.0628 - val_loss: 0.6888\n",
  4746. "Epoch 2/5\n",
  4747. "291/291 [==============================] - 0s 1ms/step - loss: 0.5495 - val_loss: 0.5319\n",
  4748. "Epoch 3/5\n",
  4749. "291/291 [==============================] - 0s 1ms/step - loss: 0.4531 - val_loss: 0.4723\n",
  4750. "Epoch 4/5\n",
  4751. "291/291 [==============================] - 0s 1ms/step - loss: 0.4088 - val_loss: 0.4239\n",
  4752. "Epoch 5/5\n",
  4753. "291/291 [==============================] - 0s 1ms/step - loss: 0.3857 - val_loss: 0.4092\n",
  4754. "73/73 [==============================] - 0s 782us/step - loss: 0.4066\n",
  4755. "Epoch 1/5\n",
  4756. "291/291 [==============================] - 0s 1ms/step - loss: 0.9645 - val_loss: 0.5424\n",
  4757. "Epoch 2/5\n",
  4758. "291/291 [==============================] - 0s 1ms/step - loss: 0.4844 - val_loss: 0.4623\n",
  4759. "Epoch 3/5\n",
  4760. "291/291 [==============================] - 0s 1ms/step - loss: 0.4253 - val_loss: 0.4155\n",
  4761. "Epoch 4/5\n",
  4762. "291/291 [==============================] - 0s 1ms/step - loss: 0.3982 - val_loss: 0.4078\n",
  4763. "Epoch 5/5\n",
  4764. "291/291 [==============================] - 0s 1ms/step - loss: 0.3823 - val_loss: 0.4521\n",
  4765. "73/73 [==============================] - 0s 746us/step - loss: 0.4314\n",
  4766. "Epoch 1/5\n",
  4767. "291/291 [==============================] - 0s 1ms/step - loss: 0.9178 - val_loss: 0.5626\n",
  4768. "Epoch 2/5\n",
  4769. "291/291 [==============================] - 0s 1ms/step - loss: 0.4787 - val_loss: 0.4673\n",
  4770. "Epoch 3/5\n",
  4771. "291/291 [==============================] - 0s 1ms/step - loss: 0.4256 - val_loss: 0.4214\n",
  4772. "Epoch 4/5\n",
  4773. "291/291 [==============================] - 0s 1ms/step - loss: 0.3978 - val_loss: 0.3964\n",
  4774. "Epoch 5/5\n",
  4775. "291/291 [==============================] - 0s 1ms/step - loss: 0.3774 - val_loss: 0.3914\n",
  4776. "73/73 [==============================] - 0s 757us/step - loss: 0.3849\n",
  4777. "Epoch 1/5\n",
  4778. "291/291 [==============================] - 0s 1ms/step - loss: 0.7389 - val_loss: 0.5297\n",
  4779. "Epoch 2/5\n",
  4780. "291/291 [==============================] - 0s 1ms/step - loss: 0.4580 - val_loss: 0.4932\n",
  4781. "Epoch 3/5\n",
  4782. "291/291 [==============================] - 0s 1ms/step - loss: 0.4165 - val_loss: 0.4136\n",
  4783. "Epoch 4/5\n",
  4784. "291/291 [==============================] - 0s 1ms/step - loss: 0.3978 - val_loss: 0.4246\n",
  4785. "Epoch 5/5\n",
  4786. "291/291 [==============================] - 0s 1ms/step - loss: 0.3865 - val_loss: 0.4346\n",
  4787. "73/73 [==============================] - 0s 750us/step - loss: 0.4057\n",
  4788. "Epoch 1/5\n",
  4789. "291/291 [==============================] - 0s 1ms/step - loss: 0.8434 - val_loss: 0.5559\n",
  4790. "Epoch 2/5\n",
  4791. "291/291 [==============================] - 0s 1ms/step - loss: 0.4721 - val_loss: 0.4607\n",
  4792. "Epoch 3/5\n",
  4793. "291/291 [==============================] - 0s 1ms/step - loss: 0.4149 - val_loss: 0.4158\n",
  4794. "Epoch 4/5\n",
  4795. "291/291 [==============================] - 0s 1ms/step - loss: 0.3888 - val_loss: 0.4170\n",
  4796. "Epoch 5/5\n",
  4797. "291/291 [==============================] - 0s 1ms/step - loss: 0.3753 - val_loss: 0.3879\n",
  4798. "73/73 [==============================] - 0s 765us/step - loss: 0.3910\n",
  4799. "Epoch 1/5\n",
  4800. "291/291 [==============================] - 0s 1ms/step - loss: 0.8391 - val_loss: 0.5323\n",
  4801. "Epoch 2/5\n",
  4802. "291/291 [==============================] - 0s 1ms/step - loss: 0.4518 - val_loss: 0.4936\n",
  4803. "Epoch 3/5\n",
  4804. "291/291 [==============================] - 0s 1ms/step - loss: 0.4083 - val_loss: 0.4271\n",
  4805. "Epoch 4/5\n",
  4806. "291/291 [==============================] - 0s 1ms/step - loss: 0.3850 - val_loss: 0.4405\n",
  4807. "Epoch 5/5\n",
  4808. "291/291 [==============================] - 0s 1ms/step - loss: 0.3731 - val_loss: 0.3860\n",
  4809. "73/73 [==============================] - 0s 749us/step - loss: 0.3935\n",
  4810. "Epoch 1/5\n",
  4811. "363/363 [==============================] - 0s 1ms/step - loss: 0.6946 - val_loss: 0.5043\n",
  4812. "Epoch 2/5\n",
  4813. "363/363 [==============================] - 0s 1ms/step - loss: 0.4602 - val_loss: 0.4307\n",
  4814. "Epoch 3/5\n",
  4815. "363/363 [==============================] - 0s 1ms/step - loss: 0.4124 - val_loss: 0.4057\n",
  4816. "Epoch 4/5\n",
  4817. "363/363 [==============================] - 0s 1ms/step - loss: 0.3900 - val_loss: 0.3942\n",
  4818. "Epoch 5/5\n",
  4819. "363/363 [==============================] - 0s 1ms/step - loss: 0.3799 - val_loss: 0.3794\n"
  4820. ]
  4821. },
  4822. {
  4823. "data": {
  4824. "text/plain": [
  4825. "GridSearchCV(estimator=<tensorflow.python.keras.wrappers.scikit_learn.KerasRegressor object at 0x7f73c6ff9048>,\n",
  4826. " param_grid={'hidden_layers': [1, 2, 3, 4],\n",
  4827. " 'layer_size': [5, 10, 20, 30],\n",
  4828. " 'learning_rate': [0.0001, 5e-05, 0.001, 0.005, 0.01]})"
  4829. ]
  4830. },
  4831. "execution_count": 7,
  4832. "metadata": {},
  4833. "output_type": "execute_result"
  4834. }
  4835. ],
  4836. "source": [
  4837. "#scipy也是sk中的\n",
  4838. "from scipy.stats import reciprocal\n",
  4839. "# 分布函数\n",
  4840. "# f(x) = 1/(x*log(b/a)) a <= x <= b\n",
  4841. "\n",
  4842. "#sk 0.21.3版本可以用这种列表\n",
  4843. "# param_distribution = {\n",
  4844. "# \"hidden_layers\":[1, 2, 3, 4],\n",
  4845. "# \"layer_size\": np.arange(1, 100),\n",
  4846. "# \"learning_rate\": reciprocal(1e-4, 1e-2),\n",
  4847. "# }\n",
  4848. "#最新版本只能用普通列表\n",
  4849. "param_distribution = {\n",
  4850. " \"hidden_layers\": [1, 2, 3, 4],\n",
  4851. " \"layer_size\": [5, 10, 20, 30],\n",
  4852. " \"learning_rate\": [1e-4, 5e-5, 1e-3, 5e-3, 1e-2],\n",
  4853. "}\n",
  4854. "\n",
  4855. "from sklearn.model_selection import RandomizedSearchCV,GridSearchCV\n",
  4856. "\n",
  4857. "#随机搜索\n",
  4858. "# random_search_cv = RandomizedSearchCV(sklearn_model,\n",
  4859. "# param_distribution)\n",
  4860. "grid_search_cv =GridSearchCV(sklearn_model,param_distribution)\n",
  4861. "# random_search_cv.fit(x_train_scaled, y_train, epochs = 5,\n",
  4862. "# validation_data = (x_valid_scaled, y_valid),\n",
  4863. "# callbacks = callbacks)\n",
  4864. "\n",
  4865. "grid_search_cv.fit(x_train_scaled, y_train, epochs = 5,\n",
  4866. " validation_data = (x_valid_scaled, y_valid),\n",
  4867. " callbacks = callbacks)\n",
  4868. "# cross_validation: 训练集分成n份,n-1训练,最后一份验证."
  4869. ]
  4870. },
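{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal, non-executed sketch of the randomized-search variant that is commented out above: `learning_rate` is sampled from the `reciprocal` (log-uniform) distribution instead of being enumerated on a fixed grid. It reuses `sklearn_model`, the scaled data splits and `callbacks` defined earlier in this notebook; the `n_iter` and `cv` values are illustrative assumptions, not settings taken from this notebook."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from scipy.stats import reciprocal\n",
"from sklearn.model_selection import RandomizedSearchCV\n",
"\n",
"# sample learning_rate from f(x) = 1/(x*log(b/a)), a <= x <= b\n",
"param_distribution = {\n",
"    \"hidden_layers\": [1, 2, 3, 4],\n",
"    \"layer_size\": np.arange(1, 100),\n",
"    \"learning_rate\": reciprocal(1e-4, 1e-2),\n",
"}\n",
"\n",
"random_search_cv = RandomizedSearchCV(sklearn_model,\n",
"                                      param_distribution,\n",
"                                      n_iter=10,  # number of sampled settings (assumption)\n",
"                                      cv=3,       # 3-fold cross-validation (assumption)\n",
"                                      n_jobs=1)\n",
"random_search_cv.fit(x_train_scaled, y_train, epochs=5,\n",
"                     validation_data=(x_valid_scaled, y_valid),\n",
"                     callbacks=callbacks)"
]
},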
  4871. {
  4872. "cell_type": "code",
  4873. "execution_count": 8,
  4874. "metadata": {},
  4875. "outputs": [
  4876. {
  4877. "name": "stdout",
  4878. "output_type": "stream",
  4879. "text": [
  4880. "{'hidden_layers': 3, 'layer_size': 30, 'learning_rate': 0.01}\n",
  4881. "-0.3978272318840027\n",
  4882. "<tensorflow.python.keras.wrappers.scikit_learn.KerasRegressor object at 0x7f738bb95400>\n"
  4883. ]
  4884. }
  4885. ],
  4886. "source": [
  4887. "# print(random_search_cv.best_params_)\n",
  4888. "# print(random_search_cv.best_score_)\n",
  4889. "# print(random_search_cv.best_estimator_)\n",
  4890. "\n",
  4891. "print(grid_search_cv.best_params_)\n",
  4892. "print(grid_search_cv.best_score_)\n",
  4893. "print(grid_search_cv.best_estimator_)"
  4894. ]
  4895. },
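{
"cell_type": "markdown",
"metadata": {},
"source": [
"Note that `best_score_` is negative because sklearn maximizes a score, so the KerasRegressor reports the MSE loss with its sign flipped. Below is a small sketch (not executed here) for inspecting the full table of search results; it only assumes pandas is available in the environment."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"\n",
"# cv_results_ holds one row per parameter combination tried by the search\n",
"results = pd.DataFrame(grid_search_cv.cv_results_)\n",
"results = results.sort_values(\"rank_test_score\")\n",
"results[[\"params\", \"mean_test_score\", \"std_test_score\"]].head()"
]
},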
  4896. {
  4897. "cell_type": "code",
  4898. "execution_count": 9,
  4899. "metadata": {},
  4900. "outputs": [
  4901. {
  4902. "name": "stdout",
  4903. "output_type": "stream",
  4904. "text": [
  4905. "162/162 [==============================] - 0s 770us/step - loss: 0.3876\n"
  4906. ]
  4907. },
  4908. {
  4909. "data": {
  4910. "text/plain": [
  4911. "0.38763508200645447"
  4912. ]
  4913. },
  4914. "execution_count": 9,
  4915. "metadata": {},
  4916. "output_type": "execute_result"
  4917. }
  4918. ],
  4919. "source": [
  4920. "#拿最佳的模型\n",
  4921. "# model = random_search_cv.best_estimator_.model\n",
  4922. "\n",
  4923. "# model.evaluate(x_test_scaled, y_test)\n",
  4924. "\n",
  4925. "model = grid_search_cv.best_estimator_.model\n",
  4926. "model.evaluate(x_test_scaled, y_test)"
  4927. ]
  4928. },
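{
"cell_type": "markdown",
"metadata": {},
"source": [
"A small follow-up sketch (not executed here): the underlying Keras model retrieved above can be saved and used for prediction like any other Keras model. The file name `best_regression_model.h5` is only an example."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# persist the tuned model and run a quick prediction on a few test rows\n",
"model.save(\"best_regression_model.h5\")\n",
"print(model.predict(x_test_scaled[:3]))"
]
},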
  4929. {
  4930. "cell_type": "code",
  4931. "execution_count": null,
  4932. "metadata": {},
  4933. "outputs": [],
  4934. "source": []
  4935. }
  4936. ],
  4937. "metadata": {
  4938. "kernelspec": {
  4939. "display_name": "Python 3",
  4940. "language": "python",
  4941. "name": "python3"
  4942. },
  4943. "language_info": {
  4944. "codemirror_mode": {
  4945. "name": "ipython",
  4946. "version": 3
  4947. },
  4948. "file_extension": ".py",
  4949. "mimetype": "text/x-python",
  4950. "name": "python",
  4951. "nbconvert_exporter": "python",
  4952. "pygments_lexer": "ipython3",
  4953. "version": "3.6.9"
  4954. }
  4955. },
  4956. "nbformat": 4,
  4957. "nbformat_minor": 2
  4958. }

With the development of artificial intelligence and big data, automation tools are in demand across many fields. During the current period of epidemic prevention and control, MindSpore is used to implement a YOLO model for object detection and semantic segmentation: it can perform mask-wearing detection and pedestrian social-distancing detection on both video and images, enabling automated epidemic-prevention management of public places.