diff --git a/inference/images/0062.jpg b/inference/images/0062.jpg index 5eca27e..a10f684 100644 Binary files a/inference/images/0062.jpg and b/inference/images/0062.jpg differ diff --git a/inference/images/02720.jpg b/inference/images/02720.jpg index b76389e..d4161ea 100644 Binary files a/inference/images/02720.jpg and b/inference/images/02720.jpg differ diff --git a/inference/images/bus.jpg b/inference/images/bus.jpg index b43e311..32223b5 100644 Binary files a/inference/images/bus.jpg and b/inference/images/bus.jpg differ diff --git a/inference/images/children.jpg b/inference/images/children.jpg index 296ab40..3b3ae85 100644 Binary files a/inference/images/children.jpg and b/inference/images/children.jpg differ diff --git a/inference/images/mask.jpeg b/inference/images/mask.jpeg index 7a62265..ea665c2 100644 Binary files a/inference/images/mask.jpeg and b/inference/images/mask.jpeg differ diff --git a/inference/output/mask/0062.jpg b/inference/output/mask/0062.jpg index 40925f5..f87ea70 100644 Binary files a/inference/output/mask/0062.jpg and b/inference/output/mask/0062.jpg differ diff --git a/inference/output/mask/02720.jpg b/inference/output/mask/02720.jpg index 0b1d166..7aef53d 100644 Binary files a/inference/output/mask/02720.jpg and b/inference/output/mask/02720.jpg differ diff --git a/inference/output/mask/bus.jpg b/inference/output/mask/bus.jpg index a5ba140..1edd896 100644 Binary files a/inference/output/mask/bus.jpg and b/inference/output/mask/bus.jpg differ diff --git a/inference/output/mask/children.jpg b/inference/output/mask/children.jpg index 6d1f460..097cf4d 100644 Binary files a/inference/output/mask/children.jpg and b/inference/output/mask/children.jpg differ diff --git a/inference/output/mask/mask.jpeg b/inference/output/mask/mask.jpeg index 502cc2b..f6a04ba 100644 Binary files a/inference/output/mask/mask.jpeg and b/inference/output/mask/mask.jpeg differ diff --git a/log.txt b/log.txt deleted file mode 100644 index e69de29..0000000 diff --git a/question_linjie.md b/question_linjie.md deleted file mode 100644 index 8750dd8..0000000 --- a/question_linjie.md +++ /dev/null @@ -1,2 +0,0 @@ -1、解决OMP: Error #15: Initializing libiomp5.dylib, but found libiomp5.dylib already initialized. 
#1715 -https://github.com/dmlc/xgboost/issues/1715(you can run this comment conda install nomkl) \ No newline at end of file diff --git a/tf02_data_generate_csv.ipynb b/tf02_data_generate_csv.ipynb deleted file mode 100644 index ad4d35b..0000000 --- a/tf02_data_generate_csv.ipynb +++ /dev/null @@ -1,853 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2.2.0\n", - "sys.version_info(major=3, minor=6, micro=9, releaselevel='final', serial=0)\n", - "matplotlib 3.3.4\n", - "numpy 1.19.5\n", - "pandas 1.1.5\n", - "sklearn 0.24.2\n", - "tensorflow 2.2.0\n", - "tensorflow.keras 2.3.0-tf\n" - ] - } - ], - "source": [ - "import matplotlib as mpl\n", - "import matplotlib.pyplot as plt\n", - "%matplotlib inline\n", - "import numpy as np\n", - "import sklearn\n", - "import pandas as pd\n", - "import os\n", - "import sys\n", - "import time\n", - "import tensorflow as tf\n", - "\n", - "from tensorflow import keras\n", - "\n", - "print(tf.__version__)\n", - "print(sys.version_info)\n", - "for module in mpl, np, pd, sklearn, tf, keras:\n", - " print(module.__name__, module.__version__)" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "from sklearn.datasets import fetch_california_housing\n", - "\n", - "housing = fetch_california_housing()" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(11610, 8) (11610,)\n", - "(3870, 8) (3870,)\n", - "(5160, 8) (5160,)\n" - ] - } - ], - "source": [ - "from sklearn.model_selection import train_test_split\n", - "\n", - "x_train_all, x_test, y_train_all, y_test = train_test_split(\n", - " housing.data, housing.target, random_state = 7)\n", - "x_train, x_valid, y_train, y_valid = train_test_split(\n", - " x_train_all, y_train_all, random_state = 11)\n", - "print(x_train.shape, y_train.shape)\n", - "print(x_valid.shape, y_valid.shape)\n", - "print(x_test.shape, y_test.shape)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from sklearn.preprocessing import StandardScaler\n", - "\n", - "scaler = StandardScaler()\n", - "x_train_scaled = scaler.fit_transform(x_train)\n", - "x_valid_scaled = scaler.transform(x_valid)\n", - "x_test_scaled = scaler.transform(x_test)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "!rm -rf generate_csv" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tf01-dataset_basic_api.ipynb tf03-tfrecord_basic_api.ipynb\r\n", - "tf02_data_generate_csv.ipynb tf04_data_generate_tfrecord.ipynb\r\n" - ] - } - ], - "source": [ - "!ls" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "numpy.ndarray" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "type(x_train_scaled)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['.ipynb_checkpoints',\n", - " 'tf02_data_generate_csv.ipynb',\n", - " 'tf04_data_generate_tfrecord.ipynb',\n", - " 'tf03-tfrecord_basic_api.ipynb',\n", - " 'tf01-dataset_basic_api.ipynb']" - ] - }, - 
"execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "os.listdir()" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0 [0 1 2 3 4]\n", - "1 [5 6 7 8 9]\n", - "2 [10 11 12 13 14]\n", - "3 [15 16 17 18 19]\n" - ] - } - ], - "source": [ - "#为了把数据分好\n", - "for file_idx, row_indices in enumerate(np.array_split(np.arange(20), 4)):\n", - " print(file_idx,row_indices)" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "MedInc,HouseAge,AveRooms,AveBedrms,Population,AveOccup,Latitude,Longitude,MidianHouseValue\n", - "--------------------------------------------------\n" - ] - } - ], - "source": [ - "#下面要把特征工程后的数据存为csv文件\n", - "output_dir = \"generate_csv\"\n", - "if not os.path.exists(output_dir):\n", - " os.mkdir(output_dir)\n", - "\n", - "#save_to_csv是工作可以直接复用的\n", - "def save_to_csv(output_dir, data, name_prefix,\n", - " header=None, n_parts=10):\n", - " #生成文件名 格式generate_csv/{}_{:02d}.csv\n", - " path_format = os.path.join(output_dir, \"{}_{:02d}.csv\") \n", - " filenames = []\n", - " #把数据分为n_parts部分,写到文件中去\n", - " for file_idx, row_indices in enumerate(\n", - " np.array_split(np.arange(len(data)), n_parts)):\n", - " #print(file_idx,row_indices)\n", - " #生成子文件名\n", - " part_csv = path_format.format(name_prefix, file_idx)\n", - " filenames.append(part_csv) #文件名添加到列表\n", - " with open(part_csv, \"w\", encoding=\"utf-8\") as f:\n", - " #先写头部\n", - " if header is not None:\n", - " f.write(header + \"\\n\")\n", - " for row_index in row_indices:\n", - " #把字符串化后的每个字符串用逗号拼接起来\n", - " f.write(\",\".join(\n", - " [repr(col) for col in data[row_index]]))\n", - " f.write('\\n')\n", - " return filenames\n", - "#np.c_把x和y合并起来,按轴1合并\n", - "train_data = np.c_[x_train_scaled, y_train]\n", - "valid_data = np.c_[x_valid_scaled, y_valid]\n", - "test_data = np.c_[x_test_scaled, y_test]\n", - "#头部,特征,也有目标\n", - "header_cols = housing.feature_names + [\"MidianHouseValue\"]\n", - "#把列表变为字符串\n", - "header_str = \",\".join(header_cols)\n", - "print(header_str)\n", - "print('-'*50)\n", - "train_filenames = save_to_csv(output_dir, train_data, \"train\",\n", - " header_str, n_parts=20)\n", - "valid_filenames = save_to_csv(output_dir, valid_data, \"valid\",\n", - " header_str, n_parts=10)\n", - "test_filenames = save_to_csv(output_dir, test_data, \"test\",\n", - " header_str, n_parts=10)" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - "temp_array=np.array([[1,2,3],[4,5,6]])\n", - "np.savetxt(\"temp.csv\",temp_array) #savetxt会自动将整型数或者浮点数转为字符串存储" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1.000000000000000000e+00 2.000000000000000000e+00 3.000000000000000000e+00\r\n", - "4.000000000000000000e+00 5.000000000000000000e+00 6.000000000000000000e+00\r\n" - ] - } - ], - "source": [ - "!cat temp.csv" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": { - "collapsed": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['generate_csv/train_00.csv', 'generate_csv/train_01.csv', 'generate_csv/train_02.csv', 'generate_csv/train_03.csv', 'generate_csv/train_04.csv', 'generate_csv/train_05.csv', 
'generate_csv/train_06.csv', 'generate_csv/train_07.csv', 'generate_csv/train_08.csv', 'generate_csv/train_09.csv', 'generate_csv/train_10.csv', 'generate_csv/train_11.csv', 'generate_csv/train_12.csv', 'generate_csv/train_13.csv', 'generate_csv/train_14.csv', 'generate_csv/train_15.csv', 'generate_csv/train_16.csv', 'generate_csv/train_17.csv', 'generate_csv/train_18.csv', 'generate_csv/train_19.csv']\n", - "train filenames:\n", - "['generate_csv/train_00.csv',\n", - " 'generate_csv/train_01.csv',\n", - " 'generate_csv/train_02.csv',\n", - " 'generate_csv/train_03.csv',\n", - " 'generate_csv/train_04.csv',\n", - " 'generate_csv/train_05.csv',\n", - " 'generate_csv/train_06.csv',\n", - " 'generate_csv/train_07.csv',\n", - " 'generate_csv/train_08.csv',\n", - " 'generate_csv/train_09.csv',\n", - " 'generate_csv/train_10.csv',\n", - " 'generate_csv/train_11.csv',\n", - " 'generate_csv/train_12.csv',\n", - " 'generate_csv/train_13.csv',\n", - " 'generate_csv/train_14.csv',\n", - " 'generate_csv/train_15.csv',\n", - " 'generate_csv/train_16.csv',\n", - " 'generate_csv/train_17.csv',\n", - " 'generate_csv/train_18.csv',\n", - " 'generate_csv/train_19.csv']\n", - "valid filenames:\n", - "['generate_csv/valid_00.csv',\n", - " 'generate_csv/valid_01.csv',\n", - " 'generate_csv/valid_02.csv',\n", - " 'generate_csv/valid_03.csv',\n", - " 'generate_csv/valid_04.csv',\n", - " 'generate_csv/valid_05.csv',\n", - " 'generate_csv/valid_06.csv',\n", - " 'generate_csv/valid_07.csv',\n", - " 'generate_csv/valid_08.csv',\n", - " 'generate_csv/valid_09.csv']\n", - "test filenames:\n", - "['generate_csv/test_00.csv',\n", - " 'generate_csv/test_01.csv',\n", - " 'generate_csv/test_02.csv',\n", - " 'generate_csv/test_03.csv',\n", - " 'generate_csv/test_04.csv',\n", - " 'generate_csv/test_05.csv',\n", - " 'generate_csv/test_06.csv',\n", - " 'generate_csv/test_07.csv',\n", - " 'generate_csv/test_08.csv',\n", - " 'generate_csv/test_09.csv']\n" - ] - } - ], - "source": [ - "#看下生成文件的文件名\n", - "print(train_filenames)\n", - "import pprint #为了打印美观性\n", - "print(\"train filenames:\")\n", - "pprint.pprint(train_filenames)\n", - "print(\"valid filenames:\")\n", - "pprint.pprint(valid_filenames)\n", - "print(\"test filenames:\")\n", - "pprint.pprint(test_filenames)" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tf.Tensor(b'generate_csv/train_13.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_01.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_14.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_11.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_12.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_06.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_15.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_10.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_05.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_02.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_00.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_07.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_16.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_09.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_19.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_03.csv', shape=(), 
dtype=string)\n", - "tf.Tensor(b'generate_csv/train_04.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_18.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_17.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_08.csv', shape=(), dtype=string)\n" - ] - } - ], - "source": [ - "# 1. filename -> dataset\n", - "# 2. read file -> dataset -> datasets -> merge\n", - "# 3. parse csv\n", - "#list_files把文件名搞为一个dataset\n", - "# list_files默认行为是按不确定的随机混排顺序返回文件名\n", - "filename_dataset = tf.data.Dataset.list_files(train_filenames)\n", - "for filename in filename_dataset:\n", - " print(filename)" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tf.Tensor(b'generate_csv/train_00.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_01.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_02.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_03.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_04.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_05.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_06.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_07.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_08.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_09.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_10.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_11.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_12.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_13.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_14.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_15.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_16.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_17.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_18.csv', shape=(), dtype=string)\n", - "tf.Tensor(b'generate_csv/train_19.csv', shape=(), dtype=string)\n" - ] - } - ], - "source": [ - "filename_mydataset=tf.data.Dataset.from_tensor_slices(train_filenames)\n", - "filename_mydataset=filename_mydataset.repeat(1)\n", - "for i in filename_mydataset:\n", - " print(i)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# 把数据从文件中拿出来" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": { - "scrolled": false - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "tf.Tensor(b'0.801544314532886,0.27216142415910205,-0.11624392696666119,-0.2023115137272354,-0.5430515742518128,-0.021039615516440048,-0.5897620622908205,-0.08241845654707416,3.226', shape=(), dtype=string)\n", - "tf.Tensor(b'-0.2980728090942217,0.3522616607867429,-0.10920507530549702,-0.25055520947444,-0.034064024638222286,-0.006034004264459185,1.080554840130013,-1.0611381656679573,1.514', shape=(), dtype=string)\n", - "tf.Tensor(b'0.8115083791797953,-0.04823952235146133,0.5187339067174729,-0.029386394873127775,-0.034064024638222286,-0.05081594842905086,-0.7157356834231196,0.9162751241885168,2.147', shape=(), dtype=string)\n", - 
"tf.Tensor(b'-0.6906143291679195,-0.1283397589791022,7.0201810347470595,5.624287386169439,-0.2663292879200034,-0.03662080416157129,-0.6457503383496215,1.2058962626018372,1.352', shape=(), dtype=string)\n", - "tf.Tensor(b'0.401276648075221,-0.9293421252555106,-0.05333050451405854,-0.1865945262276826,0.6545661895448709,0.026434465728210874,0.9312527706398824,-1.4406417263474771,2.512', shape=(), dtype=string)\n", - "tf.Tensor(b'-0.8757754235423053,1.874166156711919,-0.9487499555702599,-0.09657184824705009,-0.7163432355284542,-0.07790191228558485,0.9825753570271144,-1.4206678547327694,2.75', shape=(), dtype=string)\n", - "tf.Tensor(b'0.15782311132800697,0.43236189741438374,0.3379948076652917,-0.015880306122244434,-0.3733890577139493,-0.05305245634489608,0.8006134598360177,-1.2359095422966828,3.169', shape=(), dtype=string)\n", - "tf.Tensor(b'2.2878417437355094,-1.8905449647872008,0.6607106467795992,-0.14964778023694128,-0.06672632728722275,0.44788055801575993,-0.5337737862320228,0.5667323709310584,3.59', shape=(), dtype=string)\n", - "tf.Tensor(b'-1.0591781535672364,1.393564736946074,-0.026331968874673636,-0.11006759528831847,-0.6138198966579805,-0.09695934953589447,0.3247131133362288,-0.037477245413977976,0.672', shape=(), dtype=string)\n", - "tf.Tensor(b'-0.2223565745313433,1.393564736946074,0.02991299565857307,0.0801452044790158,-0.509481985418118,-0.06238599304952824,-0.86503775291325,0.8613469772480595,2.0', shape=(), dtype=string)\n", - "tf.Tensor(b'-0.03058829290446139,-0.9293421252555106,0.2596214817762415,-0.00601274044096368,-0.5004091235711734,-0.030779867916061836,1.5984463936739026,-1.8151518191233238,1.598', shape=(), dtype=string)\n", - "tf.Tensor(b'1.9063832474401923,0.5124621340420246,0.44758280183798754,-0.276721775345798,-0.6310583341671753,-0.07081146722873086,-0.7064043040799849,0.7464972154634646,5.00001', shape=(), dtype=string)\n", - "tf.Tensor(b'-0.9868720801669367,0.832863080552588,-0.18684708416901633,-0.14888949288707784,-0.4532302419670616,-0.11504995754593579,1.6730974284189664,-0.7465496877362412,1.138', shape=(), dtype=string)\n", - "tf.Tensor(b'0.29422955783115173,1.874166156711919,0.004626028663628252,-0.28479278487900694,-0.5602900117610076,-0.1196496378702887,1.3558305307524392,-0.9512818717870428,1.625', shape=(), dtype=string)\n", - "tf.Tensor(b'0.7751155655229017,1.874166156711919,0.15645971958808144,-0.18905190538070707,-0.6292437617977863,-0.08791603438866835,-0.7483955111240856,0.5717258388347319,4.851', shape=(), dtype=string)\n" - ] - } - ], - "source": [ - "#一访问list_files的dataset对象就随机了文件顺序\n", - "# for filename in filename_dataset:\n", - "# print(filename)\n", - "n_readers = 5\n", - "dataset = filename_mydataset.interleave(\n", - " #前面1行是header\n", - "# lambda filename: tf.data.TextLineDataset(filename),\n", - " #不带header,把特征名字去掉\n", - " lambda filename: tf.data.TextLineDataset(filename).skip(1),\n", - " cycle_length = n_readers, #cycle_length和block_length增加获取了数据的随机性\n", - " block_length=2\n", - ")\n", - "for line in dataset.take(15):\n", - " print(line)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# 把每一行数据切分为对应类型" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[, , , , ]\n" - ] - } - ], - "source": [ - "#parse csv 解析csv,通过decode_csv\n", - "# tf.io.decode_csv(str, record_defaults)\n", - "\n", - "sample_str = '1,2,3,4,5'\n", - "record_defaults = [\n", - " 
tf.constant(0, dtype=tf.int32),\n", - " 0,\n", - " np.nan,\n", - " \"hello1\",\n", - " tf.constant([])#没有固定类型,默认是float32\n", - "]\n", - "#sample_str数据格式化,按照record_defaults进行处理\n", - "parsed_fields = tf.io.decode_csv(sample_str, record_defaults)\n", - "print(parsed_fields)" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[,\n", - " ,\n", - " ,\n", - " ,\n", - " ]" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "#我们传一个空的字符串测试\n", - "#最后一个为1是可以转换的\n", - "try:\n", - " parsed_fields = tf.io.decode_csv(',,,,1', record_defaults)\n", - "except tf.errors.InvalidArgumentError as ex:\n", - " print(ex)\n", - "parsed_fields" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Expect 5 fields but have 7 in record 0 [Op:DecodeCSV]\n" - ] - } - ], - "source": [ - "#我们给的值过多的情况\n", - "try:\n", - " parsed_fields = tf.io.decode_csv('1,2,3,4,5,6,', record_defaults)\n", - "except tf.errors.InvalidArgumentError as ex:\n", - " print(ex)" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(,\n", - " )" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "#解析一行\n", - "def parse_csv_line(line, n_fields = 9):\n", - " #先写一个默认的格式,就是9个nan,如果从csv中读取缺失数据,就会变为nan\n", - " defs = [tf.constant(np.nan)] * n_fields\n", - " #使用decode_csv解析\n", - " parsed_fields = tf.io.decode_csv(line, record_defaults=defs)\n", - " #前8个是x,最后一个是y\n", - " x = tf.stack(parsed_fields[0:-1])\n", - " y = tf.stack(parsed_fields[-1:])\n", - " return x, y\n", - "\n", - "parse_csv_line(b'-0.9868720801669367,0.832863080552588,-0.18684708416901633,-0.14888949288707784,-0.4532302419670616,-0.11504995754593579,1.6730974284189664,-0.7465496877362412,1.138',\n", - " n_fields=9)" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": { - "collapsed": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n", - "--------------------------------------------------\n", - "x:\n", - "\n", - "y:\n", - "\n", - "x:\n", - "\n", - "y:\n", - "\n" - ] - } - ], - "source": [ - "# 1. filename -> dataset\n", - "# 2. read file -> dataset -> datasets -> merge\n", - "# 3. 
parse csv\n", - "#完成整个流程\n", - "def csv_reader_dataset(filenames, n_readers=5,\n", - " batch_size=32, n_parse_threads=5,\n", - " shuffle_buffer_size=10000):\n", - " #把文件名类别变为dataset tensor\n", - " dataset = tf.data.Dataset.list_files(filenames)\n", - " #变为repeat dataset可以让读到最后一个样本时,从新去读第一个样本\n", - " dataset = dataset.repeat()\n", - " dataset = dataset.interleave(\n", - " #skip(1)是因为每个文件存了特征名字,target名字\n", - " lambda filename: tf.data.TextLineDataset(filename).skip(1),\n", - " cycle_length = n_readers\n", - " )\n", - " dataset.shuffle(shuffle_buffer_size) #对数据进行洗牌,混乱\n", - " #map,通过parse_csv_line对数据集进行映射,map只会给函数传递一个参数,这个参数\n", - " #就是dataset中的tensor\n", - " dataset = dataset.map(parse_csv_line,\n", - " num_parallel_calls=n_parse_threads)\n", - " dataset = dataset.batch(batch_size)\n", - " return dataset\n", - "#这里是一个测试,写4是为了大家理解\n", - "train_set = csv_reader_dataset(train_filenames, batch_size=4)\n", - "print(train_set)\n", - "print('-'*50)\n", - "i=0\n", - "#是csv_reader_dataset处理后的结果,\n", - "for x_batch, y_batch in train_set.take(2):\n", - "# i=i+1\n", - " print(\"x:\")\n", - " pprint.pprint(x_batch)\n", - " print(\"y:\")\n", - " pprint.pprint(y_batch)\n", - "# print(i)" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 137 ms, sys: 40.3 ms, total: 177 ms\n", - "Wall time: 160 ms\n" - ] - } - ], - "source": [ - "%%time\n", - "batch_size = 32\n", - "train_set = csv_reader_dataset(train_filenames,\n", - " batch_size = batch_size)\n", - "valid_set = csv_reader_dataset(valid_filenames,\n", - " batch_size = batch_size)\n", - "test_set = csv_reader_dataset(test_filenames,\n", - " batch_size = batch_size)\n", - "\n", - "# print(train_set)\n", - "# print(valid_set)\n", - "# print(test_set)" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 1/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 1.1306 - val_loss: 0.9811\n", - "Epoch 2/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 2.4388 - val_loss: 0.5692\n", - "Epoch 3/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 0.5545 - val_loss: 0.6181\n", - "Epoch 4/100\n", - "348/348 [==============================] - 1s 4ms/step - loss: 0.6097 - val_loss: 0.4497\n", - "Epoch 5/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 0.4277 - val_loss: 0.4555\n", - "Epoch 6/100\n", - "348/348 [==============================] - 1s 4ms/step - loss: 0.3998 - val_loss: 0.3870\n", - "Epoch 7/100\n", - "348/348 [==============================] - 1s 4ms/step - loss: 0.3889 - val_loss: 0.4119\n", - "Epoch 8/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 0.3831 - val_loss: 0.3941\n", - "Epoch 9/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 0.3870 - val_loss: 0.4068\n", - "Epoch 10/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 0.3689 - val_loss: 0.3801\n", - "Epoch 11/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 0.3804 - val_loss: 0.3957\n" - ] - } - ], - "source": [ - "#我们知道长度为8\n", - "model = keras.models.Sequential([\n", - " keras.layers.Dense(30, activation='relu',\n", - " input_shape=[8]),\n", - " keras.layers.Dense(1),\n", - "])\n", - "model.compile(loss=\"mean_squared_error\", optimizer=\"sgd\")\n", - "callbacks = 
[keras.callbacks.EarlyStopping(\n", - " patience=5, min_delta=1e-2)]\n", - "\n", - "#当是BatchDataset,必须制定steps_per_epoch,validation_steps\n", - "history = model.fit(train_set,\n", - " validation_data = valid_set,\n", - " steps_per_epoch = 11160 // batch_size, #每epoch训练的步数\n", - " validation_steps = 3870 // batch_size,\n", - " epochs = 100,\n", - " callbacks = callbacks)" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "161/161 [==============================] - 0s 2ms/step - loss: 0.3995\n" - ] - }, - { - "data": { - "text/plain": [ - "0.39946985244750977" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.evaluate(test_set, steps = 5160 // batch_size)" - ] - }, - { - "cell_type": "code", - "execution_count": 37, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[,\n", - " ]" - ] - }, - "execution_count": 37, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "dataset = tf.data.Dataset.range(8)\n", - "dataset = dataset.batch(4) #把tensor组合到一起,就是分了batch\n", - "list(dataset)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.9" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tf04_data_generate_tfrecord.ipynb b/tf04_data_generate_tfrecord.ipynb deleted file mode 100644 index 7a8887b..0000000 --- a/tf04_data_generate_tfrecord.ipynb +++ /dev/null @@ -1,1094 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2.2.0\n", - "sys.version_info(major=3, minor=6, micro=9, releaselevel='final', serial=0)\n", - "matplotlib 3.3.4\n", - "numpy 1.19.5\n", - "pandas 1.1.5\n", - "sklearn 0.24.2\n", - "tensorflow 2.2.0\n", - "tensorflow.keras 2.3.0-tf\n" - ] - } - ], - "source": [ - "import matplotlib as mpl\n", - "import matplotlib.pyplot as plt\n", - "%matplotlib inline\n", - "import numpy as np\n", - "import sklearn\n", - "import pandas as pd\n", - "import os\n", - "import sys\n", - "import time\n", - "import tensorflow as tf\n", - "\n", - "from tensorflow import keras\n", - "\n", - "print(tf.__version__)\n", - "print(sys.version_info)\n", - "for module in mpl, np, pd, sklearn, tf, keras:\n", - " print(module.__name__, module.__version__)" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "test_00.csv test_08.csv train_06.csv train_14.csv valid_02.csv\r\n", - "test_01.csv test_09.csv train_07.csv train_15.csv valid_03.csv\r\n", - "test_02.csv train_00.csv train_08.csv train_16.csv valid_04.csv\r\n", - "test_03.csv train_01.csv train_09.csv train_17.csv valid_05.csv\r\n", - "test_04.csv train_02.csv train_10.csv train_18.csv valid_06.csv\r\n", - "test_05.csv train_03.csv train_11.csv train_19.csv valid_07.csv\r\n", - "test_06.csv train_04.csv train_12.csv valid_00.csv valid_08.csv\r\n", - "test_07.csv train_05.csv train_13.csv valid_01.csv valid_09.csv\r\n" - ] - } - ], - "source": [ - "!ls generate_csv" - ] - }, - { - "cell_type": 
"code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['./generate_csv/train_08.csv',\n", - " './generate_csv/train_11.csv',\n", - " './generate_csv/train_18.csv',\n", - " './generate_csv/train_15.csv',\n", - " './generate_csv/train_17.csv',\n", - " './generate_csv/train_00.csv',\n", - " './generate_csv/train_01.csv',\n", - " './generate_csv/train_19.csv',\n", - " './generate_csv/train_14.csv',\n", - " './generate_csv/train_02.csv',\n", - " './generate_csv/train_16.csv',\n", - " './generate_csv/train_09.csv',\n", - " './generate_csv/train_03.csv',\n", - " './generate_csv/train_12.csv',\n", - " './generate_csv/train_10.csv',\n", - " './generate_csv/train_13.csv',\n", - " './generate_csv/train_05.csv',\n", - " './generate_csv/train_07.csv',\n", - " './generate_csv/train_04.csv',\n", - " './generate_csv/train_06.csv']\n", - "['./generate_csv/valid_01.csv',\n", - " './generate_csv/valid_05.csv',\n", - " './generate_csv/valid_02.csv',\n", - " './generate_csv/valid_04.csv',\n", - " './generate_csv/valid_08.csv',\n", - " './generate_csv/valid_07.csv',\n", - " './generate_csv/valid_06.csv',\n", - " './generate_csv/valid_00.csv',\n", - " './generate_csv/valid_09.csv',\n", - " './generate_csv/valid_03.csv']\n", - "['./generate_csv/test_00.csv',\n", - " './generate_csv/test_07.csv',\n", - " './generate_csv/test_01.csv',\n", - " './generate_csv/test_08.csv',\n", - " './generate_csv/test_06.csv',\n", - " './generate_csv/test_02.csv',\n", - " './generate_csv/test_04.csv',\n", - " './generate_csv/test_05.csv',\n", - " './generate_csv/test_09.csv',\n", - " './generate_csv/test_03.csv']\n" - ] - } - ], - "source": [ - "source_dir = \"./generate_csv/\"\n", - "\n", - "#通过判断开头去添加文件\n", - "def get_filenames_by_prefix(source_dir, prefix_name):\n", - " all_files = os.listdir(source_dir)\n", - " results = []\n", - " for filename in all_files:\n", - " if filename.startswith(prefix_name):\n", - " results.append(os.path.join(source_dir, filename))\n", - " return results\n", - "\n", - "train_filenames = get_filenames_by_prefix(source_dir, \"train\")\n", - "valid_filenames = get_filenames_by_prefix(source_dir, \"valid\")\n", - "test_filenames = get_filenames_by_prefix(source_dir, \"test\")\n", - "\n", - "import pprint\n", - "pprint.pprint(train_filenames)\n", - "pprint.pprint(valid_filenames)\n", - "pprint.pprint(test_filenames)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "#下面的接口都是之前用过的\n", - "def parse_csv_line(line, n_fields = 9):\n", - " defs = [tf.constant(np.nan)] * n_fields\n", - " parsed_fields = tf.io.decode_csv(line, record_defaults=defs)\n", - " x = tf.stack(parsed_fields[0:-1])\n", - " y = tf.stack(parsed_fields[-1:])\n", - " return x, y\n", - "\n", - "def csv_reader_dataset(filenames, n_readers=5,\n", - " batch_size=32, n_parse_threads=5,\n", - " shuffle_buffer_size=10000):\n", - " dataset = tf.data.Dataset.list_files(filenames)\n", - " dataset = dataset.repeat()\n", - " dataset = dataset.interleave(\n", - " lambda filename: tf.data.TextLineDataset(filename).skip(1),\n", - " cycle_length = n_readers\n", - " )\n", - " dataset.shuffle(shuffle_buffer_size)\n", - " dataset = dataset.map(parse_csv_line,\n", - " num_parallel_calls=n_parse_threads)\n", - " dataset = dataset.batch(batch_size)\n", - " return dataset\n", - "\n", - "batch_size = 32\n", - "train_set = csv_reader_dataset(train_filenames,\n", - " batch_size = batch_size)\n", - "valid_set = 
csv_reader_dataset(valid_filenames,\n", - " batch_size = batch_size)\n", - "test_set = csv_reader_dataset(test_filenames,\n", - " batch_size = batch_size)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "chapter_4.tar.gz\t tf02_data_generate_csv.ipynb\r\n", - "generate_csv\t\t tf03-tfrecord_basic_api.ipynb\r\n", - "generate_tfrecords\t tf04_data_generate_tfrecord.ipynb\r\n", - "temp.csv\t\t tfrecord_basic\r\n", - "tf01-dataset_basic_api.ipynb\r\n" - ] - } - ], - "source": [ - "!ls" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# 把train_set,valid_set,test_set 存储到tfrecord类型的文件中" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "#把基础的如何序列化的步骤搞到一个函数\n", - "def serialize_example(x, y):\n", - " \"\"\"Converts x, y to tf.train.Example and serialize\"\"\"\n", - " input_feautres = tf.train.FloatList(value = x) #特征\n", - " label = tf.train.FloatList(value = y)#标签\n", - " features = tf.train.Features(\n", - " feature = {\n", - " \"input_features\": tf.train.Feature(\n", - " float_list = input_feautres),\n", - " \"label\": tf.train.Feature(float_list = label)\n", - " }\n", - " )\n", - " #把features变为example\n", - " example = tf.train.Example(features = features)\n", - " return example.SerializeToString() #把example序列化\n", - "#n_shards是存为多少个文件,steps_per_shard和 steps_per_epoch类似\n", - "def csv_dataset_to_tfrecords(base_filename, dataset,\n", - " n_shards, steps_per_shard,\n", - " compression_type = None):\n", - " #压缩文件类型\n", - " options = tf.io.TFRecordOptions(\n", - " compression_type = compression_type)\n", - " all_filenames = []\n", - " \n", - " for shard_id in range(n_shards):\n", - " filename_fullpath = '{}_{:05d}-of-{:05d}'.format(\n", - " base_filename, shard_id, n_shards) #base_filename是一个前缀\n", - " #打开文件\n", - " with tf.io.TFRecordWriter(filename_fullpath, options) as writer:\n", - " #取出数据,为什么skip,上一个文件写了前500行,下一个文件存后面的数据\n", - " for x_batch, y_batch in dataset.skip(shard_id * steps_per_shard).take(steps_per_shard):\n", - " for x_example, y_example in zip(x_batch, y_batch):\n", - " writer.write(\n", - " serialize_example(x_example, y_example))\n", - " all_filenames.append(filename_fullpath)\n", - " #返回所有tfrecord文件名\n", - " return all_filenames" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "!rm -rf generate_tfrecords" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": { - "collapsed": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(, )\n" - ] - } - ], - "source": [ - "for i in train_set.take(1):\n", - " print(i) " - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 40 s, sys: 8.95 s, total: 48.9 s\n", - "Wall time: 42.6 s\n" - ] - } - ], - "source": [ - "%%time\n", - "# 训练集和测试集都分20\n", - "n_shards = 20\n", - "train_steps_per_shard = 11610 // batch_size // n_shards\n", - "valid_steps_per_shard = 3880 // batch_size // 10\n", - "test_steps_per_shard = 5170 // batch_size // 10\n", - "\n", - "output_dir = \"generate_tfrecords\"\n", - "if not os.path.exists(output_dir):\n", - " os.mkdir(output_dir)\n", - "\n", - "train_basename = os.path.join(output_dir, \"train\")\n", - "valid_basename = os.path.join(output_dir, \"valid\")\n", - "test_basename = 
os.path.join(output_dir, \"test\")\n", - "\n", - "train_tfrecord_filenames = csv_dataset_to_tfrecords(\n", - " train_basename, train_set, n_shards, train_steps_per_shard, None)\n", - "valid_tfrecord_filenames = csv_dataset_to_tfrecords(\n", - " valid_basename, valid_set, 10, valid_steps_per_shard, None)\n", - "test_tfrecord_fielnames = csv_dataset_to_tfrecords(\n", - " test_basename, test_set, 10, test_steps_per_shard, None)\n", - "#执行会发现目录下总计生成了60个文件,这里文件数目改为一致,为了对比时间" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "总用量 1960\r\n", - "-rw-rw-r-- 1 luke luke 47616 Jul 23 11:33 test_00000-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 47616 Jul 23 11:33 test_00001-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 47616 Jul 23 11:33 test_00002-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 47616 Jul 23 11:33 test_00003-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 47616 Jul 23 11:33 test_00004-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 47616 Jul 23 11:33 test_00005-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 47616 Jul 23 11:33 test_00006-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 47616 Jul 23 11:33 test_00007-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 47616 Jul 23 11:33 test_00008-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 47616 Jul 23 11:33 test_00009-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:32 train_00000-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:32 train_00001-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:32 train_00002-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:32 train_00003-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:32 train_00004-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:32 train_00005-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:32 train_00006-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:32 train_00007-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:32 train_00008-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:33 train_00009-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:33 train_00010-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:33 train_00011-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:33 train_00012-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:33 train_00013-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:33 train_00014-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:33 train_00015-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:33 train_00016-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:33 train_00017-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:33 train_00018-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 53568 Jul 23 11:33 train_00019-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 35712 Jul 23 11:33 valid_00000-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 35712 Jul 23 11:33 valid_00001-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 35712 Jul 23 11:33 valid_00002-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 35712 Jul 23 11:33 valid_00003-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 35712 Jul 23 11:33 valid_00004-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 35712 Jul 23 11:33 valid_00005-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 35712 Jul 23 11:33 valid_00006-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 35712 Jul 23 11:33 valid_00007-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 35712 Jul 23 11:33 valid_00008-of-00010\r\n", - "-rw-rw-r-- 1 luke luke 35712 Jul 23 11:33 valid_00009-of-00010\r\n" - ] - } - ], - "source": [ - "!ls -l 
generate_tfrecords" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "metadata": {}, - "outputs": [], - "source": [ - "#生成一下压缩的\n", - "# n_shards = 20\n", - "# train_steps_per_shard = 11610 // batch_size // n_shards\n", - "# valid_steps_per_shard = 3880 // batch_size // n_shards\n", - "# test_steps_per_shard = 5170 // batch_size // n_shards\n", - "\n", - "# output_dir = \"generate_tfrecords_zip\"\n", - "# if not os.path.exists(output_dir):\n", - "# os.mkdir(output_dir)\n", - "\n", - "# train_basename = os.path.join(output_dir, \"train\")\n", - "# valid_basename = os.path.join(output_dir, \"valid\")\n", - "# test_basename = os.path.join(output_dir, \"test\")\n", - "# #只需修改参数的类型即可\n", - "# train_tfrecord_filenames = csv_dataset_to_tfrecords(\n", - "# train_basename, train_set, n_shards, train_steps_per_shard,\n", - "# compression_type = \"GZIP\")\n", - "# valid_tfrecord_filenames = csv_dataset_to_tfrecords(\n", - "# valid_basename, valid_set, n_shards, valid_steps_per_shard,\n", - "# compression_type = \"GZIP\")\n", - "# test_tfrecord_fielnames = csv_dataset_to_tfrecords(\n", - "# test_basename, test_set, n_shards, test_steps_per_shard,\n", - "# compression_type = \"GZIP\")" - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "总用量 860\r\n", - "-rw-rw-r-- 1 luke luke 10171 May 7 11:16 test_00000-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10230 May 7 11:16 test_00001-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10204 May 7 11:16 test_00002-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10213 May 7 11:16 test_00003-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10229 May 7 11:16 test_00004-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10200 May 7 11:16 test_00005-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10199 May 7 11:16 test_00006-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10215 May 7 11:16 test_00007-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10179 May 7 11:16 test_00008-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10149 May 7 11:16 test_00009-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10141 May 7 11:16 test_00010-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10221 May 7 11:16 test_00011-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10209 May 7 11:16 test_00012-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10214 May 7 11:16 test_00013-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10212 May 7 11:16 test_00014-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10209 May 7 11:16 test_00015-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10185 May 7 11:16 test_00016-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10266 May 7 11:16 test_00017-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10258 May 7 11:16 test_00018-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 10170 May 7 11:16 test_00019-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22359 May 7 19:17 train_00000-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22447 May 7 19:17 train_00001-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22366 May 7 19:17 train_00002-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22311 May 7 19:17 train_00003-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22384 May 7 19:17 train_00004-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22341 May 7 19:17 train_00005-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22416 May 7 19:17 train_00006-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22285 May 7 19:17 train_00007-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22415 May 7 19:17 train_00008-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22365 May 7 19:17 train_00009-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22431 May 7 19:17 
train_00010-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22367 May 7 19:17 train_00011-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22346 May 7 19:17 train_00012-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22332 May 7 19:17 train_00013-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22452 May 7 19:17 train_00014-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 20 May 7 19:17 train_00015-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22427 May 7 11:16 train_00016-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22427 May 7 11:16 train_00017-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22454 May 7 11:16 train_00018-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 22309 May 7 11:16 train_00019-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7747 May 7 11:16 valid_00000-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7744 May 7 11:16 valid_00001-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7749 May 7 11:16 valid_00002-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7755 May 7 11:16 valid_00003-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7744 May 7 11:16 valid_00004-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7678 May 7 11:16 valid_00005-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7762 May 7 11:16 valid_00006-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7720 May 7 11:16 valid_00007-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7727 May 7 11:16 valid_00008-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7739 May 7 11:16 valid_00009-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7762 May 7 11:16 valid_00010-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7727 May 7 11:16 valid_00011-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7729 May 7 11:16 valid_00012-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7763 May 7 11:16 valid_00013-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7727 May 7 11:16 valid_00014-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7749 May 7 11:16 valid_00015-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7741 May 7 11:16 valid_00016-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7753 May 7 11:16 valid_00017-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7702 May 7 11:16 valid_00018-of-00020\r\n", - "-rw-rw-r-- 1 luke luke 7711 May 7 11:16 valid_00019-of-00020\r\n" - ] - } - ], - "source": [ - "!ls -l generate_tfrecords_zip" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['generate_tfrecords/train_00000-of-00020',\n", - " 'generate_tfrecords/train_00001-of-00020',\n", - " 'generate_tfrecords/train_00002-of-00020',\n", - " 'generate_tfrecords/train_00003-of-00020',\n", - " 'generate_tfrecords/train_00004-of-00020',\n", - " 'generate_tfrecords/train_00005-of-00020',\n", - " 'generate_tfrecords/train_00006-of-00020',\n", - " 'generate_tfrecords/train_00007-of-00020',\n", - " 'generate_tfrecords/train_00008-of-00020',\n", - " 'generate_tfrecords/train_00009-of-00020',\n", - " 'generate_tfrecords/train_00010-of-00020',\n", - " 'generate_tfrecords/train_00011-of-00020',\n", - " 'generate_tfrecords/train_00012-of-00020',\n", - " 'generate_tfrecords/train_00013-of-00020',\n", - " 'generate_tfrecords/train_00014-of-00020',\n", - " 'generate_tfrecords/train_00015-of-00020',\n", - " 'generate_tfrecords/train_00016-of-00020',\n", - " 'generate_tfrecords/train_00017-of-00020',\n", - " 'generate_tfrecords/train_00018-of-00020',\n", - " 'generate_tfrecords/train_00019-of-00020']\n", - "['generate_tfrecords/valid_00000-of-00010',\n", - " 'generate_tfrecords/valid_00001-of-00010',\n", - " 'generate_tfrecords/valid_00002-of-00010',\n", - " 'generate_tfrecords/valid_00003-of-00010',\n", - " 'generate_tfrecords/valid_00004-of-00010',\n", 
- " 'generate_tfrecords/valid_00005-of-00010',\n", - " 'generate_tfrecords/valid_00006-of-00010',\n", - " 'generate_tfrecords/valid_00007-of-00010',\n", - " 'generate_tfrecords/valid_00008-of-00010',\n", - " 'generate_tfrecords/valid_00009-of-00010']\n", - "['generate_tfrecords/test_00000-of-00010',\n", - " 'generate_tfrecords/test_00001-of-00010',\n", - " 'generate_tfrecords/test_00002-of-00010',\n", - " 'generate_tfrecords/test_00003-of-00010',\n", - " 'generate_tfrecords/test_00004-of-00010',\n", - " 'generate_tfrecords/test_00005-of-00010',\n", - " 'generate_tfrecords/test_00006-of-00010',\n", - " 'generate_tfrecords/test_00007-of-00010',\n", - " 'generate_tfrecords/test_00008-of-00010',\n", - " 'generate_tfrecords/test_00009-of-00010']\n" - ] - } - ], - "source": [ - "#打印一下文件名\n", - "pprint.pprint(train_tfrecord_filenames)\n", - "pprint.pprint(valid_tfrecord_filenames)\n", - "pprint.pprint(test_tfrecord_fielnames)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CPU times: user 58 µs, sys: 14 µs, total: 72 µs\n", - "Wall time: 80.1 µs\n" - ] - } - ], - "source": [ - "%%time\n", - "#把数据读取出来\n", - "expected_features = {\n", - " \"input_features\": tf.io.FixedLenFeature([8], dtype=tf.float32),\n", - " \"label\": tf.io.FixedLenFeature([1], dtype=tf.float32)\n", - "}\n", - "\n", - "def parse_example(serialized_example):\n", - " example = tf.io.parse_single_example(serialized_example,\n", - " expected_features)\n", - " return example[\"input_features\"], example[\"label\"]\n", - "\n", - "def tfrecords_reader_dataset(filenames, n_readers=5,\n", - " batch_size=32, n_parse_threads=5,\n", - " shuffle_buffer_size=10000):\n", - " dataset = tf.data.Dataset.list_files(filenames)\n", - " dataset = dataset.repeat() #为了能够无限次epoch\n", - " dataset = dataset.interleave(\n", - "# lambda filename: tf.data.TFRecordDataset(\n", - "# filename, compression_type = \"GZIP\"),\n", - " lambda filename: tf.data.TFRecordDataset(\n", - " filename),\n", - " cycle_length = n_readers\n", - " )\n", - " #洗牌,就是给数据打乱,样本顺序打乱\n", - " dataset.shuffle(shuffle_buffer_size)\n", - " dataset = dataset.map(parse_example,\n", - " num_parallel_calls=n_parse_threads)#把对应的一个样本是字节流的,变为浮点类型\n", - " dataset = dataset.batch(batch_size) #原来写进去是一条一条的sample,要分配\n", - " return dataset\n", - "\n", - "#测试一下,tfrecords_reader_dataset是否可以正常运行\n", - "# tfrecords_train = tfrecords_reader_dataset(train_tfrecord_filenames,\n", - "# batch_size = 3)\n", - "# for x_batch, y_batch in tfrecords_train.take(10):\n", - "# print(x_batch)\n", - "# print(y_batch)" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": { - "collapsed": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "WARNING:tensorflow:AutoGraph could not transform . at 0x7f98284712f0> and will run it as-is.\n", - "Please report this to the TensorFlow team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output.\n", - "Cause: Unable to locate the source code of . at 0x7f98284712f0>. Note that functions defined in certain environments, like the interactive Python shell do not expose their source code. If that is the case, you should to define them in a .py source file. If you are certain the code is graph-compatible, wrap the call using @tf.autograph.do_not_convert. 
Original error: could not get source code\n", - "To silence this warning, decorate the function with @tf.autograph.experimental.do_not_convert\n", - "WARNING: AutoGraph could not transform . at 0x7f98284712f0> and will run it as-is.\n", - "Please report this to the TensorFlow team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output.\n", - "Cause: Unable to locate the source code of . at 0x7f98284712f0>. Note that functions defined in certain environments, like the interactive Python shell do not expose their source code. If that is the case, you should to define them in a .py source file. If you are certain the code is graph-compatible, wrap the call using @tf.autograph.do_not_convert. Original error: could not get source code\n", - "To silence this warning, decorate the function with @tf.autograph.experimental.do_not_convert\n", - "WARNING:tensorflow:AutoGraph could not transform and will run it as-is.\n", - "Please report this to the TensorFlow team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output.\n", - "Cause: Unable to locate the source code of . Note that functions defined in certain environments, like the interactive Python shell do not expose their source code. If that is the case, you should to define them in a .py source file. If you are certain the code is graph-compatible, wrap the call using @tf.autograph.do_not_convert. Original error: could not get source code\n", - "To silence this warning, decorate the function with @tf.autograph.experimental.do_not_convert\n", - "WARNING: AutoGraph could not transform and will run it as-is.\n", - "Please report this to the TensorFlow team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output.\n", - "Cause: Unable to locate the source code of . Note that functions defined in certain environments, like the interactive Python shell do not expose their source code. If that is the case, you should to define them in a .py source file. If you are certain the code is graph-compatible, wrap the call using @tf.autograph.do_not_convert. Original error: could not get source code\n", - "To silence this warning, decorate the function with @tf.autograph.experimental.do_not_convert\n", - "WARNING:tensorflow:AutoGraph could not transform . at 0x7f9828471378> and will run it as-is.\n", - "Please report this to the TensorFlow team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output.\n", - "Cause: Unable to locate the source code of . at 0x7f9828471378>. Note that functions defined in certain environments, like the interactive Python shell do not expose their source code. If that is the case, you should to define them in a .py source file. If you are certain the code is graph-compatible, wrap the call using @tf.autograph.do_not_convert. Original error: could not get source code\n", - "To silence this warning, decorate the function with @tf.autograph.experimental.do_not_convert\n", - "WARNING: AutoGraph could not transform . at 0x7f9828471378> and will run it as-is.\n", - "Please report this to the TensorFlow team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output.\n", - "Cause: Unable to locate the source code of . at 0x7f9828471378>. 
Note that functions defined in certain environments, like the interactive Python shell do not expose their source code. If that is the case, you should to define them in a .py source file. If you are certain the code is graph-compatible, wrap the call using @tf.autograph.do_not_convert. Original error: could not get source code\n", - "To silence this warning, decorate the function with @tf.autograph.experimental.do_not_convert\n", - "WARNING:tensorflow:AutoGraph could not transform . at 0x7f9828471598> and will run it as-is.\n", - "Please report this to the TensorFlow team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output.\n", - "Cause: Unable to locate the source code of . at 0x7f9828471598>. Note that functions defined in certain environments, like the interactive Python shell do not expose their source code. If that is the case, you should to define them in a .py source file. If you are certain the code is graph-compatible, wrap the call using @tf.autograph.do_not_convert. Original error: could not get source code\n", - "To silence this warning, decorate the function with @tf.autograph.experimental.do_not_convert\n", - "WARNING: AutoGraph could not transform . at 0x7f9828471598> and will run it as-is.\n", - "Please report this to the TensorFlow team. When filing the bug, set the verbosity to 10 (on Linux, `export AUTOGRAPH_VERBOSITY=10`) and attach the full output.\n", - "Cause: Unable to locate the source code of . at 0x7f9828471598>. Note that functions defined in certain environments, like the interactive Python shell do not expose their source code. If that is the case, you should to define them in a .py source file. If you are certain the code is graph-compatible, wrap the call using @tf.autograph.do_not_convert. 
Original error: could not get source code\n", - "To silence this warning, decorate the function with @tf.autograph.experimental.do_not_convert\n", - "CPU times: user 218 ms, sys: 83.8 ms, total: 302 ms\n", - "Wall time: 294 ms\n" - ] - } - ], - "source": [ - "%%time\n", - "#得到dataset,dataset是tensor,可以直接拿tensor训练\n", - "\n", - "batch_size = 32\n", - "tfrecords_train_set = tfrecords_reader_dataset(\n", - " train_tfrecord_filenames, batch_size = batch_size)\n", - "tfrecords_valid_set = tfrecords_reader_dataset(\n", - " valid_tfrecord_filenames, batch_size = batch_size)\n", - "tfrecords_test_set = tfrecords_reader_dataset(\n", - " test_tfrecord_fielnames, batch_size = batch_size)" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "tensorflow.python.data.ops.dataset_ops.BatchDataset" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "type(tfrecords_train_set)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(, )\n" - ] - } - ], - "source": [ - "for i in tfrecords_train_set.take(1):\n", - " print(i)" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 1/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 0.8099 - val_loss: 0.6248\n", - "Epoch 2/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 0.5202 - val_loss: 0.5199\n", - "Epoch 3/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 0.4712 - val_loss: 0.4874\n", - "Epoch 4/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 0.4509 - val_loss: 0.4747\n", - "Epoch 5/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.4298 - val_loss: 0.4615\n", - "Epoch 6/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.4159 - val_loss: 0.4296\n", - "Epoch 7/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.4033 - val_loss: 0.4194\n", - "Epoch 8/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.4042 - val_loss: 0.4123\n", - "Epoch 9/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.5006 - val_loss: 0.4300\n", - "Epoch 10/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3920 - val_loss: 0.4135\n", - "Epoch 11/100\n", - "348/348 [==============================] - 1s 3ms/step - loss: 0.3976 - val_loss: 0.4100\n", - "Epoch 12/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3836 - val_loss: 0.3966\n", - "Epoch 13/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3744 - val_loss: 0.3917\n", - "Epoch 14/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.4394 - val_loss: 0.4169\n", - "Epoch 15/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3968 - val_loss: 0.3938\n", - "Epoch 16/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3682 - val_loss: 0.3880\n", - "Epoch 17/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3709 - val_loss: 0.3835\n", - "Epoch 18/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3666 - val_loss: 0.3795\n", - "Epoch 19/100\n", - "348/348 
[==============================] - 1s 2ms/step - loss: 0.3692 - val_loss: 0.3756\n", - "Epoch 20/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3587 - val_loss: 0.3736\n", - "Epoch 21/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3554 - val_loss: 0.3765\n", - "Epoch 22/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3619 - val_loss: 0.3732\n", - "Epoch 23/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3529 - val_loss: 0.4280\n", - "Epoch 24/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3537 - val_loss: 0.3658\n", - "Epoch 25/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3515 - val_loss: 0.3704\n", - "Epoch 26/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3707 - val_loss: 0.3642\n", - "Epoch 27/100\n", - "348/348 [==============================] - 1s 2ms/step - loss: 0.3512 - val_loss: 0.3651\n" - ] - } - ], - "source": [ - "# Start training\n", - "model = keras.models.Sequential([\n", - " keras.layers.Dense(30, activation='relu',\n", - " input_shape=[8]),\n", - " keras.layers.Dense(1),\n", - "])\n", - "model.compile(loss=\"mean_squared_error\", optimizer=\"sgd\")\n", - "callbacks = [keras.callbacks.EarlyStopping(\n", - " patience=5, min_delta=1e-2)]\n", - "\n", - "history = model.fit(tfrecords_train_set,\n", - " validation_data = tfrecords_valid_set,\n", - " steps_per_epoch = 11160 // batch_size,\n", - " validation_steps = 3870 // batch_size,\n", - " epochs = 100,\n", - " callbacks = callbacks)" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "161/161 [==============================] - 0s 2ms/step - loss: 0.3376\n" - ] - }, - { - "data": { - "text/plain": [ - "0.33755674958229065" - ] - }, - "execution_count": 20, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.evaluate(tfrecords_test_set, steps = 5160 // batch_size)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.9" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tf09_keras_regression-wide_deep-subclass.ipynb b/tf09_keras_regression-wide_deep-subclass.ipynb deleted file mode 100644 index 8ca9572..0000000 --- a/tf09_keras_regression-wide_deep-subclass.ipynb +++ /dev/null @@ -1,407 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2.2.0\n", - "sys.version_info(major=3, minor=6, micro=9, releaselevel='final', serial=0)\n", - "matplotlib 3.2.1\n", - "numpy 1.18.5\n", - "pandas 1.0.4\n", - "sklearn 0.23.1\n", - "tensorflow 2.2.0\n", - "tensorflow.keras 2.3.0-tf\n" - ] - } - ], - "source": [ - "import matplotlib as mpl\n", - "import matplotlib.pyplot as plt\n", - "%matplotlib inline\n", - "import numpy as np\n", - "import sklearn\n", - "import pandas as pd\n", - "import os\n", - "import sys\n", - "import time\n", - "import tensorflow as tf\n", - 
"\n", - "from tensorflow import keras\n", - "\n", - "print(tf.__version__)\n", - "print(sys.version_info)\n", - "for module in mpl, np, pd, sklearn, tf, keras:\n", - " print(module.__name__, module.__version__)" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - ".. _california_housing_dataset:\n", - "\n", - "California Housing dataset\n", - "--------------------------\n", - "\n", - "**Data Set Characteristics:**\n", - "\n", - " :Number of Instances: 20640\n", - "\n", - " :Number of Attributes: 8 numeric, predictive attributes and the target\n", - "\n", - " :Attribute Information:\n", - " - MedInc median income in block\n", - " - HouseAge median house age in block\n", - " - AveRooms average number of rooms\n", - " - AveBedrms average number of bedrooms\n", - " - Population block population\n", - " - AveOccup average house occupancy\n", - " - Latitude house block latitude\n", - " - Longitude house block longitude\n", - "\n", - " :Missing Attribute Values: None\n", - "\n", - "This dataset was obtained from the StatLib repository.\n", - "http://lib.stat.cmu.edu/datasets/\n", - "\n", - "The target variable is the median house value for California districts.\n", - "\n", - "This dataset was derived from the 1990 U.S. census, using one row per census\n", - "block group. A block group is the smallest geographical unit for which the U.S.\n", - "Census Bureau publishes sample data (a block group typically has a population\n", - "of 600 to 3,000 people).\n", - "\n", - "It can be downloaded/loaded using the\n", - ":func:`sklearn.datasets.fetch_california_housing` function.\n", - "\n", - ".. topic:: References\n", - "\n", - " - Pace, R. Kelley and Ronald Barry, Sparse Spatial Autoregressions,\n", - " Statistics and Probability Letters, 33 (1997) 291-297\n", - "\n", - "(20640, 8)\n", - "(20640,)\n" - ] - } - ], - "source": [ - "from sklearn.datasets import fetch_california_housing\n", - "\n", - "housing = fetch_california_housing()\n", - "print(housing.DESCR)\n", - "print(housing.data.shape)\n", - "print(housing.target.shape)" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(11610, 8) (11610,)\n", - "(3870, 8) (3870,)\n", - "(5160, 8) (5160,)\n" - ] - } - ], - "source": [ - "from sklearn.model_selection import train_test_split\n", - "\n", - "x_train_all, x_test, y_train_all, y_test = train_test_split(\n", - " housing.data, housing.target, random_state = 7)\n", - "x_train, x_valid, y_train, y_valid = train_test_split(\n", - " x_train_all, y_train_all, random_state = 11)\n", - "print(x_train.shape, y_train.shape)\n", - "print(x_valid.shape, y_valid.shape)\n", - "print(x_test.shape, y_test.shape)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from sklearn.preprocessing import StandardScaler\n", - "\n", - "scaler = StandardScaler()\n", - "x_train_scaled = scaler.fit_transform(x_train)\n", - "x_valid_scaled = scaler.transform(x_valid)\n", - "x_test_scaled = scaler.transform(x_test)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Model: \"wide_deep_model_2\"\n", - "_________________________________________________________________\n", - "Layer (type) Output Shape Param # \n", - 
"=================================================================\n", - "dense_6 (Dense) multiple 270 \n", - "_________________________________________________________________\n", - "dense_7 (Dense) multiple 930 \n", - "_________________________________________________________________\n", - "dense_8 (Dense) multiple 39 \n", - "=================================================================\n", - "Total params: 1,239\n", - "Trainable params: 1,239\n", - "Non-trainable params: 0\n", - "_________________________________________________________________\n", - "None\n" - ] - } - ], - "source": [ - "# 子类API\n", - "#Model类的介绍看下面\n", - "# https://tensorflow.google.cn/api_docs/python/tf/keras/Model\n", - "#其实就是将原有面向过程的代码,改为面向对象\n", - "class WideDeepModel(keras.models.Model):\n", - " def __init__(self):\n", - " super(WideDeepModel, self).__init__()\n", - " \"\"\"定义模型的层次\"\"\"\n", - " #初始化了3个全连接层的层对象\n", - " self.hidden1_layer = keras.layers.Dense(30, activation='relu')\n", - " self.hidden2_layer = keras.layers.Dense(30, activation='relu')\n", - " self.output_layer = keras.layers.Dense(1)\n", - " \n", - " def call(self, input):\n", - " \"\"\"完成模型的正向计算(搭建),call是被build方法调用的\"\"\"\n", - " #这里的input是输入的特征形状\n", - " hidden1 = self.hidden1_layer(input)\n", - " hidden2 = self.hidden2_layer(hidden1)\n", - " concat = keras.layers.concatenate([input, hidden2])\n", - " output = self.output_layer(concat)\n", - " return output\n", - "\n", - "model = WideDeepModel()\n", - "#下面这种也可以,和model = WideDeepModel()是没有打印细节\n", - "# model = keras.models.Sequential([\n", - "# WideDeepModel(),\n", - "# ])\n", - "#build等价于调用call,29行和30行是等价的\n", - "# model(input_shape=(None, 8))\n", - "model.build(input_shape=(None, 8))\n", - " \n", - "print(model.summary())\n", - "model.compile(loss=\"mean_squared_error\",\n", - " optimizer = keras.optimizers.SGD(0.001))\n", - "callbacks = [keras.callbacks.EarlyStopping(\n", - " patience=5, min_delta=1e-2)]" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 1/100\n", - "363/363 [==============================] - 1s 3ms/step - loss: 2.8035 - val_loss: 1.0274\n", - "Epoch 2/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.7387 - val_loss: 0.6841\n", - "Epoch 3/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.6182 - val_loss: 0.6414\n", - "Epoch 4/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.5884 - val_loss: 0.6184\n", - "Epoch 5/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.5689 - val_loss: 0.6016\n", - "Epoch 6/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.5555 - val_loss: 0.5885\n", - "Epoch 7/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.5444 - val_loss: 0.5785\n", - "Epoch 8/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.5350 - val_loss: 0.5679\n", - "Epoch 9/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.5276 - val_loss: 0.5606\n", - "Epoch 10/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.5214 - val_loss: 0.5531\n", - "Epoch 11/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.5158 - val_loss: 0.5499\n", - "Epoch 12/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.5110 - val_loss: 0.5429\n", - "Epoch 13/100\n", - "363/363 [==============================] - 1s 
2ms/step - loss: 0.5059 - val_loss: 0.5385\n", - "Epoch 14/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.5008 - val_loss: 0.5325\n", - "Epoch 15/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4973 - val_loss: 0.5282\n", - "Epoch 16/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4933 - val_loss: 0.5243\n", - "Epoch 17/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4890 - val_loss: 0.5195\n", - "Epoch 18/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4858 - val_loss: 0.5162\n", - "Epoch 19/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4830 - val_loss: 0.5129\n", - "Epoch 20/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4794 - val_loss: 0.5113\n", - "Epoch 21/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4764 - val_loss: 0.5058\n", - "Epoch 22/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4734 - val_loss: 0.5035\n", - "Epoch 23/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4707 - val_loss: 0.4990\n", - "Epoch 24/100\n", - "363/363 [==============================] - 1s 1ms/step - loss: 0.4676 - val_loss: 0.4974\n", - "Epoch 25/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4657 - val_loss: 0.4938\n", - "Epoch 26/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4632 - val_loss: 0.4917\n", - "Epoch 27/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4600 - val_loss: 0.4888\n", - "Epoch 28/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4579 - val_loss: 0.4864\n", - "Epoch 29/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4554 - val_loss: 0.4835\n", - "Epoch 30/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4529 - val_loss: 0.4804\n", - "Epoch 31/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4505 - val_loss: 0.4786\n", - "Epoch 32/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4483 - val_loss: 0.4758\n", - "Epoch 33/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4461 - val_loss: 0.4749\n", - "Epoch 34/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4431 - val_loss: 0.4708\n", - "Epoch 35/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4414 - val_loss: 0.4681\n", - "Epoch 36/100\n", - "363/363 [==============================] - 1s 1ms/step - loss: 0.4394 - val_loss: 0.4661\n", - "Epoch 37/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4383 - val_loss: 0.4651\n", - "Epoch 38/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4354 - val_loss: 0.4635\n", - "Epoch 39/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4335 - val_loss: 0.4605\n", - "Epoch 40/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4319 - val_loss: 0.4587\n", - "Epoch 41/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4293 - val_loss: 0.4571\n", - "Epoch 42/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4274 - val_loss: 0.4542\n", - "Epoch 43/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4254 - val_loss: 0.4520\n", - "Epoch 44/100\n", - 
"363/363 [==============================] - 1s 2ms/step - loss: 0.4242 - val_loss: 0.4504\n", - "Epoch 45/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4219 - val_loss: 0.4490\n", - "Epoch 46/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4198 - val_loss: 0.4473\n", - "Epoch 47/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4196 - val_loss: 0.4449\n", - "Epoch 48/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4173 - val_loss: 0.4432\n", - "Epoch 49/100\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4154 - val_loss: 0.4417\n" - ] - } - ], - "source": [ - "history = model.fit(x_train_scaled, y_train,\n", - " validation_data = (x_valid_scaled, y_valid),\n", - " epochs = 100,\n", - " callbacks = callbacks)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAe0AAAEzCAYAAAAcgFukAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deZxddZ3n/9fn7rVlTyohCUlYAySQSEAYFArQgNCC/mwHEByw1fQouMyvm2m0+4cOLq3y6La7p7EhozTiKIEfjd0ooSGtFMuwmICBJCwhiUCqyL7Wfrfv/HHOrbpVqUrdqjqhcuq8n4/HeZz93O/9kuLz3c73mnMOEREROfrFRjsBIiIiUhkFbRERkZBQ0BYREQkJBW0REZGQUNAWEREJCQVtERGRkBg0aJvZbDN7wsxeNbMNZvaVfq4xM/sHM9tkZq+Y2fvKzl1vZm/6y/VBfwEREZGosMHe0zazGcAM59xLZlYHvAh8zDn3atk1lwFfAi4D3g/8vXPu/WY2CVgDLAGcf++Zzrl9R+TbiIiIjGGD1rSdc9uccy/52y3Aa8DMPpddCdzrPM8DE/xgfwmwyjm31w/Uq4BLA/0GIiIiETGkPm0zmwssBl7oc2omsLVsv8k/NtBxERERGaJEpReaWS3wL8BXnXMHg06ImS0DlgFUVVWdOXv27MCeXSwWicVi7Ol0tOUcx9Zp/N1IlPJTgqM8DZ7yNFjKz+CV5+nGjRt3O+emDnZPRUHbzJJ4AfvnzrmH+rmkGSiPsrP8Y81AQ5/jjf19hnNuObAcYMmSJW7NmjWVJK0ijY2NNDQ08O1fv8p9v3uHDbephX4kSvkpwVGeBk95GizlZ/DK89TM3q7knkpGjxvwE+A159zfDnDZw8B/8UeRnwMccM5tAx4DlprZRDObCCz1j42KdDJGV744Wh8vIiIyIpXUtM8DPg2sM7O1/rGvA8cCOOfuBFbijRzfBLQDn/HP7TWzbwGr/ftuc87tDS75Q5OKx8kXHYWiIx6z0UqGiIjIsAwatJ1zzwCHjXDOe2/sxgHO3Q3cPazUBSyd9BoWsvkiVan4KKdGRERkaCoeiDYWpBNe0O7KFxS0RUQClMvlaGpqorOzc7STclTLZDLMmjWLZDI5rPsjFbRT3UFb/doiIkFqamqirq6OuXPn4g2Fkr6cc+zZs4empibmzZs3rGdEavx+OuHVrrtyCtoiIkHq7Oxk8uTJCtiHYWZMnjx5RK0REQvafp92oTDKKRERGXsUsAc30jyKZNDuVE1bRGTMqa2tHe0kHHGRCtrq0xYRkTCLVNAu9WlnFbRFRMYs5xw333wzCxYsYOHChdx///0AbNu2jfPPP59FixaxYMECnn76aQqFAjfccEP3tT/84Q9HOfWHF6nR46X3tLvy6tMWERmrHnroIdauXcvLL7/M7t27Oeusszj//PP5xS9+wSWXXMJf/uVfUigUaG9vZ+3atTQ3N7N+/XoA9u/fP8qpP7xIBe1UXM3jIiJH2v/41QZefTfY35U69ZhxfOOjp1V07TPPPMM111xDPB6nvr6eCy64gNWrV3PWWWfxJ3/yJ+RyOT72sY+xaNEijjvuOLZs2cKXvvQlLr/8cpYuXRpouoMWqebxTFJBW0Qkqs4//3yeeuopZs6cyQ033MC9997LxIkTefnll2loaODOO+/kc5/73Ggn87AiVdNWn7aIyJFXaY34SPngBz/IXXfdxfXXX8/evXt56qmnuP3223n77beZNWsWn//85+nq6uKll17isssuI5VK8YlPfIKTTz6Z6667blTTPpiIBW31aYuIjHUf//jHee655zjjjDMwM37wgx8wffp0fvrTn3L77beTTCapra3l3nvvpbm5mc985jMUi15l7q//+q9HOfWHF6mg3f3Kl97TFhEZc1pbWwFvApPbb7+d22+/vdf566+/nuuvv/6Q+1566aX3JH1BiFSfdnfzeEFBW0REwidSQVs1bRERCbNIBe14zEjETH3aIiISSpEK2uANRtMrXyIiEkbRC9rJuF75EhGRUIpe0E7E1DwuIiKhFLmgnVLzuIiIhFTkgnY6EVPzuIhIxB3ut7ffeustFixY8B6mpnIRDNpx1bRFRCSUIhe0U+rTFhEZc2655RbuuOOO7v1vfvObfPvb3+biiy/mfe97HwsXLuTf/u3fhvzczs5OPvOZz7Bw4UIWL17ME088AcCGDRs4++yzWbRoEaeffjpvvvkmbW1tXH755ZxxxhksWLCg+3e8gxSpaUzBH4imyVVERI6cR2+B7euCfeb0hfCR7w14+qqrruKrX/0qN954IwAPPPAAjz32GF/+8pcZN24cu3fv5pxzzuGKK67AzCr+2DvuuAMzY926dbz++ussXbqUjRs3cuedd/KVr3yFa6+9lmw2S6FQYOXKlRxzzDE88sgjABw4cGBk37kfkatppxMxTWMqIjLGLF68mJ07d/Luu+/y8ssvM3HiRKZPn87Xv/51Tj/9dD70oQ/R3NzMjh07hvTcZ555pvuXv+bPn8+cOXPYuHEj5557Lt/97nf5/ve
/z9tvv01VVRULFy5k1apV/MVf/AVPP/0048ePD/x7DlrTNrO7gT8CdjrnDumZN7ObgWvLnncKMNU5t9fM3gJagAKQd84tCSrhw5VOxFXTFhE5kg5TIz6SPvnJT/Lggw+yfft2rrrqKn7+85+za9cuXnzxRZLJJHPnzqWzszOQz/rUpz7F+9//fh555BEuu+wy7rrrLi666CJeeuklVq5cyV/91V9x8cUXc+uttwbyeSWV1LTvAS4d6KRz7nbn3CLn3CLga8CTzrm9ZZdc6J8f9YAN6tMWERmrrrrqKlasWMGDDz7IJz/5SQ4cOMC0adNIJpM88cQTvP3220N+5gc/+EF+/vOfA7Bx40beeecdTj75ZLZs2cJxxx3Hl7/8Za688kpeeeUV3n33Xaqrq7nuuuu4+eabj8ivhw1a03bOPWVmcyt83jXAfSNJ0JGmV75ERMam0047jZaWFmbOnMmMGTO49tpr+ehHP8rChQtZsmQJ8+fPH/Izv/jFL/KFL3yBhQsXkkgkuOeee0in0zzwwAP87Gc/I5lMdjfDr169mptvvplYLEYymeSf/umfAv+OgQ1EM7NqvBr5TWWHHfC4mTngLufc8qA+b7jSSU2uIiIyVq1b1zMAbsqUKTz33HP9Xlf67e3+zJ07l/Xr1wOQyWT453/+50OuueWWW7jlllt6Hbvkkku45JJLhpPsiplzbvCLvJr2r/vr0y675irgOufcR8uOzXTONZvZNGAV8CXn3FMD3L8MWAZQX19/5ooVK4byPQ6rtbW1+0X6X7zWxVNNee78cE1gz4+a8vyUYChPg6c8DdZg+Tl+/HhOOOGE9zBF4bVp0yYOHDjQK08vvPDCFyvpRg7yla+r6dM07pxr9tc7zeyXwNlAv0Hbr4UvB1iyZIlraGgILGGNjY2UnvdC5+s80bSFIJ8fNeX5KcFQngZPeRqswfLztddeo66u7r1LUADWrVvHpz/96V7H0uk0L7zwwhH93Ewmw+LFi4f1bzSQoG1m44ELgOvKjtUAMedci7+9FLgtiM8biXQiRq7gKBYdsVjl7+qJiMjYsnDhQtauXTvayRiSSl75ug9oAKaYWRPwDSAJ4Jy707/s48Djzrm2slvrgV/6L7EngF845/49uKQPTzoRByBbKJKJxUc5NSIiY4dzbkgTl0RRJV3Sh1PJ6PFrKrjmHrxXw8qPbQHOGG7CjpRUwnvLrStXJJNU0BYRCUImk2HPnj1MnjxZgXsAzjn27NlDJpMZ9jMiOY0pQFehgN9gICIiIzRr1iyamprYtWvXaCflqJbJZJg1a9aw749u0NasaCIigUkmk8ybN2+0kzHmRW7u8e7mcb2rLSIiIRO5oF0aiKapTEVEJGyiF7ST3lfWVKYiIhI20Qvaah4XEZGQUtAWEREJiQgGbX9yFQVtEREJmQgG7VJNWwPRREQkXCIXtFN6T1tEREIqckG755UvBW0REQmXCAbt0itfah4XEZFwiVzQ1oxoIiISVpEL2nrlS0REwipyQTsRjxGPmV75EhGR0Ilc0Aavtq1XvkREJGwiGbRTiZiax0VEJHQiGbTTiZje0xYRkdCJaNCOky0oaIuISLhEMmin1KctIiIhFMmgreZxEREJo8gGbTWPi4hI2EQ0aMdV0xYRkdCJZNBWn7aIiIRRJIN2Wu9pi4hICA0atM3sbjPbaWbrBzjfYGYHzGytv9xadu5SM3vDzDaZ2S1BJnwk0sm4pjEVEZHQqaSmfQ9w6SDXPO2cW+QvtwGYWRy4A/gIcCpwjZmdOpLEBiUVV01bRETCZ9Cg7Zx7Ctg7jGefDWxyzm1xzmWBFcCVw3hO4NJJ9WmLiEj4BNWnfa6ZvWxmj5rZaf6xmcDWsmua/GOjTn3aIiISRokAnvESMMc512pmlwH/Cpw41IeY2TJgGUB9fT2NjY0BJM3T2tra63k73s3Skc0H+hlR0jc/ZeSUp8FTngZL+Rm84eTpiIO2c+5g2fZKM/uRmU0BmoHZZZfO8o8N9JzlwHKAJUuWuIaGhpEmrVtjYyPlz3spt5GVf3iTCy64ADML7HOiom9+ysgpT4OnPA2W8jN4w8nTETePm9l08yOfmZ3tP3MPsBo40czmmVkKuBp4eKSfF4R0wvvaaiIXEZEwGbSmbWb3AQ3AFDNrAr4BJAGcc3cCfwx8wczyQAdwtXPOAXkzuwl4DIgDdzvnNhyRbzFEpaCdLRTJJOOjnBoREZHKDBq0nXPXDHL+H4F/HODcSmDl8JJ25HTXtHNFyIxyYkRERCoU0RnRvNq1XvsSEZEwiWbQTvrN4+rTFhGREIlm0NZANBERCaFIBu2UgraIiIRQJIN2d592Tn3aIiISHhEN2j2vfImIiIRFJIN2qvyVLxERkZCIZNDueeVLQVtERMIjokG71DyuPm0REQmPaAbtpJrHRUQkfCIZtFNxvfIlIiLhE8mgnU5qGlMREQmfaAbthKYxFRGR8Ilk0E7EDDM1j4uISLhEMmibGelETEFbRERCJZJBG7x3tdU8LiIiYRLhoB3TQDQREQmVyAbtVCKm97RFRCRUIhu01actIiJhE+GgHVfQFhGRUIls0E6pT1tEREImskFbzeMiIhI20Q3aSb3yJSIi4RLdoK2atoiIhExkg7b6tEVEJGwGDdpmdreZ7TSz9QOcv9bMXjGzdWb2rJmdUXbuLf/4WjNbE2TCRyqt97RFRCRkKqlp3wNcepjzfwAucM4tBL4FLO9z/kLn3CLn3JLhJfHISCfiZAsK2iIiEh6JwS5wzj1lZnMPc/7Zst3ngVkjT9aR59W01TwuIiLhEXSf9meBR8v2HfC4mb1oZssC/qwR0UA0EREJG3PODX6RV9P+tXNuwWGuuRD4EfAB59we/9hM51yzmU0DVgFfcs49NcD9y4BlAPX19WeuWLFiiF9lYK2trdTW1vY69tCbWX61Ocfdl1RjZoF9VhT0l58yMsrT4ClPg6X8DF55nl544YUvVtKNPGjzeCXM7HTgx8BHSgEbwDnX7K93mtkvgbOBfoO2c245fn/4kiVLXENDQxBJA6CxsZG+z1tffBO3eSPnffACUonIDqIflv7yU0ZGeRo85WmwlJ/BG06ejjhamdmxwEPAp51zG8uO15hZXWkbWAr0OwJ9NKQTcQC99iUiIqExaE3bzO4DGoApZtYEfANIAjjn7gRuBSYDP/KbmfN+Fb8e+KV/LAH8wjn370fgOwxLOumVV7ryRepGOS0iIiKVqGT0+DWDnP8c8Ll+jm8Bzjj0jqND2m8S11SmIiISFpHtzC31Y2sEuYiIhEVkg7b6tEVEJGwiHLTVPC4iIuES2aCt5nEREQmbyAbt7uZx/WiIiIiERISDdqmmrT5tEREJh+gG7aT6tEVEJFwiG7RTcfVpi4hIuEQ2aKeTeuVLRETCJbpBW698iYhIyEQ2aOuVLxERCZvIBu20graIiIRMZIN290C0nPq0RUQkHCIbtM2MdCJGV0E1bRERCYfIBm3w+rU1I5qIiIRFpIN2OhFXn7aIiIRGxIN2TK98iYhIaEQ+aGtyFRERCYtIB+
1UIqbmcRERCY1IB+10Un3aIiISHtEO2okYWTWPi4hISEQ+aKumLSIiYaGgrfe0RUQkJCIetONkNSOaiIiExNgO2s7Bb7/DtB1P9ns6pVe+REQkRCoK2mZ2t5ntNLP1A5w3M/sHM9tkZq+Y2fvKzl1vZm/6y/VBJbwiZvDqvzJ11//p97Sax0VEJEwqrWnfA1x6mPMfAU70l2XAPwGY2STgG8D7gbOBb5jZxOEmdljqF1Db+na/pzQQTUREwqSioO2cewrYe5hLrgTudZ7ngQlmNgO4BFjlnNvrnNsHrOLwwT949adR1bkdOg8eciqdjGsaUxERCY2g+rRnAlvL9pv8YwMdf+/UL/DWO1875FQq7vVpO+fe0ySJiIgMR2K0E1BiZsvwmtapr6+nsbExkOemO1s4F9j49EO8O7Oj17l3m7IUHfzmiUYSMQvk86KgtbU1sP8+4lGeBk95GizlZ/CGk6dBBe1mYHbZ/iz/WDPQ0Od4Y38PcM4tB5YDLFmyxDU0NPR32dA5R27NVzhpfJaT+jxzY2wzvPk65573QWrSR0355ajX2NhIYP99BFCeHgnK02ApP4M3nDwNqnn8YeC/+KPIzwEOOOe2AY8BS81soj8Abal/7L1jRlvNPNix4ZBTqbj39TUYTUREwqCi6qWZ3YdXY55iZk14I8KTAM65O4GVwGXAJqAd+Ix/bq+ZfQtY7T/qNufc4Qa0HRGttXOYsONJKBYh1lNOSSfjAHpXW0REQqGioO2cu2aQ8w64cYBzdwN3Dz1pwWmrmQvNj8D+t2HSvO7j6YRf09a72iIiEgJje0Y0X2utH6h39J4bJp3watqaylRERMIgEkG7reZYwA7p106ppi0iIiESiaBdjKdh8vGwfV2v493N4+rTFhGREIhE0Aa8SVb61LRLQVuzoomISBhEK2jv+wN0tXYf6m4eV9AWEZEQiE7Qnl6azvTV7kOlgWhqHhcRkTCITtCuP81bl40gTydV0xYRkfCITtAePxvS42F7WdBW87iIiIRIdIK2mVfbLhuMpj5tEREJk+gEbegJ2v5PcXb3aefUpy0iIke/aAXt6Qsg2+JNZ4qax0VEJFyiFbTr/RHkfhN56Ve+9J62iIiEQbSC9rRTAOsejBaLGal4TDVtEREJhWgF7VQNTDqu92tfiZje0xYRkVCIVtCGfkeQq3lcRETCIIJBewHs3QLZNqBU01bQFhGRo1/0gvb0BYCDna8BkE7GFbRFRCQUohe0S9OZ+j/TmU7E9J62iIiEQvSC9oQ5kKrree0rESNbUE1bRESOftEL2t3TmXojyL2atoK2iIgc/aIXtKHXdKbpRFyvfImISChEM2hPXwBdB+HAVjWPi4hIaEQzaJemM92+Xs3jIiISGtEM2tNO9dY7Nug9bRERCY1oBu10LUycBzvWqU9bRERCo6KgbWaXmtkbZrbJzG7p5/wPzWytv2w0s/1l5wpl5x4OMvEj4g9G0zSmIiISFonBLjCzOHAH8GGgCVhtZg87514tXeOc+29l138JWFz2iA7n3KLgkhyQ6Qvh9UeoOTar5nEREQmFSmraZwObnHNbnHNZYAVw5WGuvwa4L4jEHVH1pwGOmbk/KGiLiEgoVBK0ZwJby/ab/GOHMLM5wDzgt2WHM2a2xsyeN7OPDTulQfOnM53RuYVC0ZHXa18iInKUG7R5fIiuBh50zpWP7JrjnGs2s+OA35rZOufc5r43mtkyYBlAfX09jY2NgSWqtbX10Oe5Ih+IZ6javgY4ld888STphAX2mWNZv/kpI6I8DZ7yNFjKz+ANJ08rCdrNwOyy/Vn+sf5cDdxYfsA51+yvt5hZI15/9yFB2zm3HFgOsGTJEtfQ0FBB0irT2NhIv8/bfDontu8CYMqJZ3DmnEmBfeZYNmB+yrApT4OnPA2W8jN4w8nTSprHVwMnmtk8M0vhBeZDRoGb2XxgIvBc2bGJZpb2t6cA5wGv9r131ExfwNT2TUyqTvJ3//HmaKdGRETksAYN2s65PHAT8BjwGvCAc26Dmd1mZleUXXo1sMI558qOnQKsMbOXgSeA75WPOh919adhnQf483NqePrN3Ty/Zc9op0hERGRAFfVpO+dWAiv7HLu1z/43+7nvWWDhCNJ3ZPnTmX5i5gH+ri7N3z6+kfv/9BzM1LctIiJHn2jOiFbiT2ea3rOBmy46gd+9tZen39w9yokSERHpX7SDdmYcTJgDOzZw1VmzmTmhir95/A16t/CLiIgcHaIdtMGbGW37etKJOF+5+ERebjrAf7y2c7RTJSIicggF7frTYO9m2PQf/D/vm8ncydX8zeNvUCyqti0iIkcXBe3Fn4YpJ8H//gSJR/+cP79wFq9vb2Hl+m2jnTIREZFeFLQnzIZlT8K5N8Gau7n8/3ySj03eyt+u2qipTUVE5KiioA2QzMAl34EbHsFcgR+2fY0/3vcTfvXiW6OdMhERkW4K2uXmngdfeBYWX8cXEw+z8NGPk3v3ldFOlYiICKCgfah0HXbl/+SV85czvriP2P+6EJ7+GyjkRztlIiIScQraA1h44X/m5ml30Whnw29ug79bAKtuhZ2vjXbSREQkohS0B2BmLLv0LD7bfiOrzvgHmLEInv1H+NE5cNcF8MJd0Ka5ykVE5L2joH0Y/+n4Kfyn46fwtQ3H0P7H/xv+7A249HvgivDof4e/OQnu+xS89ivIZ0c7uSIiMsYpaA/iz5aexO7WLH/6sxf5/d4EnPMF+K9PewPWzvkCNK+B+6+D20+AFdfC6h/Dns2gqVBFRCRgFf3KV5SdOWcS/98fncr//O2bfPxHz3LeCZO5seEEzj3+VGzpt+Hib8KWJ+C1h2FzI7z+a+/GCcfC8RfBcRfCvPOhetJofg0RERkDFLQr8NkPzOPqs2bzixfe4X89vYVP/fgFFh87gRsbTuDiU6ZhJ34YTvywV7veuwU2/xY2PwHrH4IX7wEMjlkMx54LM98HM8+EiXNBPwEqIiJDoKBdoZp0gs+ffxyfPncOD77YxJ1PbuZz965h/vQ6vnjhCVy+cAbxmMHk473l7M9DIQfNL3oBfEsjrPkJPH+H98CqSV7w7l7eBzVTRvU7iojI0U1Be4gyyTjXnTOHq8+aza9eeZcfPbGZL9/3e77/6OtcumA6l5w2nTPnTPQCeDwJx57jLRd+zQviO1/1Annzi9D8Emz6D8Dv/x43C6ae3LNM8ddqWhcRERS0hy0Rj/HxxbO48oyZPP7qdu5fvZWfPfc2P3nmD0yuSfGhU+pZelo9550whUwy7t0UT8KMM7xlyZ94x7paYNvLXhDfvg52vQFvPwv5jp4Pq5nqB/CTYOopMG0+TDtVNXMRkYhR0B6hWMy4dMEMLl0wg9auPE++sYvHNmxn5bpt3L9mK9WpOA0nT2XpqdM59/jJ1I/L9H5Aug7mfsBbSopFOLDVC+C73/DWu96Adf8CXQd6rqueAtNOganzvfW0U2DyiV4wV3+5iMiYo6AdoNp0gstPn8Hlp88gmy/y3JY9PL5hO6te3cHKddsBmDE+w6LZE7qXhbPGU53q8
58hFoOJc7zlpKU9x52Dlu1eE/uu173Z2Xa9Di+vgGxLz3WJDIybCeNnwfjZ3nrC7J79cTO9H0kREZFQUdA+QlKJGBecNJULTprKt65cwLrmA7z49j7Wbt3P2q37eXS9F8TjMeOk+joWzZ7AGbPGM3/GOE6qrz00kINXex43w1tOuLjnuHNwoMkL4Hv/AAfe8fYPNMHm33iBnj7vjddM6x3IJxzbs107DVK1kKpRjV1E5CiioP0eiMWMM2ZP4IzZE7qP7Wnt4uWm/fz+HS+I//qVd7nvd+8AXpycM6mak6fXMX/6OOZPr2P+jHEcO6naG+DWl5kXgCfM7j8B+S44+K4fyLd66/1+YN+xATY+BvnOfm40L3in6yBd62/XclpLDtpXwviZPTX6cTOhbgbE9U9KRORI0f9hR8nk2jQXza/novn1ABSLjq372nl9ewuvb2vhjR0HeX1bC4+/uqN7crVMMsZxU2qZN7WG46fUcNzUWuZNqeG4qTXUZZIDf1giDZPmeUt/nIO23V4Nff9WaN8D2VboavUGymVbvO2st1/d/i6sXd+7SR7AYlA73QvmtfWQmQBVE6Bq4qFL9SSvtq9mehGRiiloHyViMWPO5BrmTK7hktOmdx/vyBZ4c2dLdzDfsruV9c0HeHTdNoplLd5T69LMm1LD3MnVHDOhipn+csyEKmZMyJBOxAf+cDOoneotM88cNK2rGxtpaGiAzgNwoBkONnu19oPN/n6T10zfsc9bykfC95WZAHXTvSb52ulQV++vp3uBvTzQp+q8/n4RkYhS0D7KVaXinD5rAqfPmtDreFe+wDt72tmyu40tu9rYsquVLbvbaHxjFztbug55ztS6tB/EM9SPyzCtLkP9uDTT6jJMG5emvi7DuKoENpQ+7Mx4b6k/9fDX5TqgYz907u8J5G27oXUntG6H1h3QsgO2Pu+tC4emH/Bq8pnymvuEnjSkx/VsZ8Z712XG9wT+zAQFfBEJvYqCtpldCvw9EAd+7Jz7Xp/zNwC3A83+oX90zv3YP3c98Ff+8W87534aQLojL52Ic2J9HSfW1x1yritfYPuBTpr3d9C8r4N393fy7v4Omvd38Pq2Fp7auJvWrnw/z4wxbVyaqbVpptalmVLrL3WlYymm1KbpzDucc5UH+GSVt4ybMfi1znnBvWUHdOz1gn0p0JcH/Y590L7Xq9F3HfSuK+YGfq7FvFnoqid7gby0LtXgS/32aX+7+1hZf37sMK0VIiLvgUGDtpnFgTuADwNNwGoze9g592qfS+93zt3U595JwDeAJXjDl1/0790XSOqlX+lEvLupfSBtXXl2tnSx82AnO/x1aX9Xaxdv7W5n9Vv72Nee7fcHy1KN/86k6hQTqpNMqkkxsTrFxJqkt65OMakmxeRaf12TZlJNilSigpquWU9Neiic8wbTdb97zZYAABLkSURBVB7ovbTv9YJ/+56yxQ/2TWu84D9Qzb6vZE3vQF4K7qlqiKe9yXMSaYinetbxlFdgqZ7svT9fM83rCqiaqEKAiAxZJTXts4FNzrktAGa2ArgS6Bu0+3MJsMo5t9e/dxVwKXDf8JIrQalJJ5iXTjBvysCBHSBfKLK3LcvOli52t3axuzXL715+lYkzZrOvLcvethz727O8tv0g+9qy7O/IDfirpHWZBJNrvEA+qSbtbdemmOwH/dL2JH+pSsYrr82b9dTo66YPfn2vL5ntHmTXa8mWtksD8lq9Wn35/v63IdfuPaPQBYVsz7YrHia9MW9ynBpvLMGpLVnYe1/Pd0hWQaK0nfEKDOWD+EpN/hqtLxIplfzFzwS2lu03Ae/v57pPmNn5wEbgvznntg5w78xhplVGQSIeY9q4DNPKZnKb0rKJhoZT+r2+UHQc7Mixpy3L3rYse9u6vO3WLHvasv7xLpr2tfNK0372tWfJFfqP8sm4UZtOUJdJ+uvS0rM/virJuKqkt8746yrveF0m2f8rcod8yRQkJgU/x3sh7wXxXIdXw2/b6fXjt+3yltJ2605q2rbD203eoL1ch1cQOFzQL8mM94P5JO+9+lLtPuGvy1sAEmm/lWC8t86M88YCdG+P9wsLGfX/ixylgiqm/wq4zznXZWZ/CvwUuGgoDzCzZcAygPr6ehobGwNKGrS2tgb6vKgbSn5m8EppM+PAOH/plsC5OB15aMk6b8m57u32HHQUHB25LjryXezf79iWh4688xcYIN53q0pATdKoSRq1Saju3jaqk1CdMDIJoyoBmbiRSUBVwrq3kzGGNjhvUJO8JXZyr/xobW2ltra25zLnMJcnVswSL3QRL3SRyLeQzLWSyB8kmWslmWvxj7WQ7Ggh1raHWDHn35cjVsxjzlt7Sxcxd+hYhv4ULUkhnqIYS1GMpSnGkhRjKQrxNIV4FYV4pmyp6rVduq4YS5etU732i7HUEZ+4R3/3wVJ+Bm84eVpJ0G4GymftmEXPgDMAnHN7ynZ/DPyg7N6GPvf2m0Ln3HJgOcCSJUtcQ0NDf5cNS2PpFSUJxNGSn8452rMFDnbmONCR42BH3l97+32X/e1Z9nTk2NLi7eeLg0R8IBEzqlNxqlMJqtNxalIJqlJxalJxqtMJqpNxatIJxvktAHW91j3bNf61sQFq/u9Znua7vKb9zgN+0/9Bf/+gt51rh1wnsXwHsVynX/MvW+c6INcG2V3Q3tbzPr8rDC0dFvMG9yWrvRaCQ5aeyXy6J/gp309W97QeJNJei0Ii47eaZCCWoPHJJ4+Kf6djxdHydz+WDCdPKwnaq4ETzWweXhC+GvhU+QVmNsM5t83fvQJ4zd9+DPiumZVGFS0FvjakFIoMwMyoSXsBccb4qiHd65yjLVvgYEeOtq48rV152roK/jpPWzbfs91VoD2bpz1boD1boK0rz+7WLG1722nv8vZb+hmN358qP8jXpL2CQCn4tx/o5Ne7XqYmFe/+TtUpr5BQk/YKDNVJ756qVNy7P+ltVzTAr6QU5IL8hTjnvG6ArB/ES837uU5/3eENEixtZ9u87Wxbn6XVGxi4f6u/3zK8AgGAxfhALAVrxvcuGJRvx1MQS/hLvGdt/nY85XUblCYJ6l77rxRqYiAZBYMGbedc3sxuwgvAceBu59wGM7sNWOOcexj4spldAeSBvcAN/r17zexbeIEf4LbSoDSR0WTm9ZfXpoPpISoWHa3ZPC2deVo6c73WBzvzdGS94O8VCLxCQKkwcKA9y67WIu9u2k2bXyiopBWgpNQaUAr0temEVyDwCwc1/vesSsZJJ2OkE3HSiZi3JOOk4jH/eIyqZJyqVE+BoCoVpyoZP/zYALOewkDQ4wJKbwV0tfaemS/b7g32y/tL+Xa+C/KdbPvDRmZPm+gXENr9+1q9eQGybV5Bo1iAYt5bu9K2vwwmkfFaAJLVfstAde/tUiEhkfEXvzUg2Wc/liwrNPQtRPhLojQ+IVW2ndRvA0RQRf/Hcs6tBFb2OXZr2fbXGKAG7Zy7G7h7BGkUOerFYsa4jDcYDoZW64fezWTOObKFIu1+zb896607/GDfkSt01/rbu/K05/x1
tkBbWeGgeX8HbV152v1Wg85cBQPbBpBKxLxugmRPS0BtWaGg1CJQm46TSZYvMTIJr7CQScbJJOJUpbztUgEhkxi426DXWwFMHVKaNzc2MnskzbmFnNeN0D0xUNkEQaX9UitBrr2nMFGaBjjb7r9Z0HX4OQRGIu4XlpLVftdBWddCqubQ7oR4qqcbId63e6FvwaBsSaSJ5zu8nw3WIMVRpfdFRI4yZubXhuNMrEkF9txi0SsMdOWKdOULdOWL/uJv54p05grdhYKOXIGObJ6ObJH2XJ7ObKG7JaDUdbC7tatXN0K2MLyCQToR667VV/kBv7qspt93XZ3qKRiUH08ne1oLdrQV2X6g0ys4JL3WhSENKown/XfrA+hKKOTLWgM6/aXL6y4o9qnhH7KfL2tNyHotBN3b/jNL3Q1drd66dYdfcCjrfhhON0OZDwI8Q59xCLW9xyIkqvxChP8WQtLfT1T1tDCUFQQO2U74LQjdBYay7VhCLQsoaItERixmZGJeoIPD/MDMCGTzRTpyBbpyBTpzRTrzBTpL2zl/O1+ks1QoyBXoyBa6Cwsd2QLt/v2lFoZdLV3d50rnC5V2Hzz9m1676USspwXAr/lnkl43gbff+3x1yms98LobeloVSt0RGb+7IZWIkYrHSPndDol4n9poPOEtqcPPi3BElRccClm/4OCvC9me46Ul3+W1NhS89ebX13H87BmHFgayrd5AxoPbygYuli1B6lP794J6n26DRMorWCSrytZl26WCRa+CQrqfY6nDFCBGb2IkBW0RCUwq4QUuqo5MoaAkm/cLBFmvQFAqAJSCf2e2wO/XbWDeCSf1HMsV/cJE/wWKAx05dpadLxUUhtN6EDP8AO6PHyjrJigdK9X+E/EYyZgRjxmJeIyEv52MG/FYjFTcqE77gxb9wkNVqvd+6VmlZ/fb3TDCgsPW9kaOH2p3Q2lMQr6zJ5iXCgL9TUiU7/JaFroLD7ne270KEmX3dZ/zn9t5wB8Q2dEzADLXPqzv3S+LwyXfhXP+a3DPrJCCtoiETqlwMO4wP0lbtecNGs4+dsSflc0XvYGDftdAW9k4g85cgWy+SLZQJOt3N2RLS8ErJPTqhvALC125Ii2d3v35oiNfLJIvOPJFR6HoyBWKFIqOfMENq9CQise6CwvlLQHJhHlrv1Wg1DqQ8gchdndLlLocUj37G3flSW/e0z1osdeARr9Akor3KTCUj0kY+lCPYJUKEOWDGMtbHHodKys8dBcU+hQkjlk8Kl9DQVtE5DC8oJZiQvXofH6x6OjMF/q8etjz9kFbl1cw6OwuIPSMUejK+y0M+QK5QpFcwXUXKFo68+QKPQWMjmxPF8WAby+8+Pyg6U0l+gR1v+CQSXqFhWTcSMRifkuCdbc0JPzCRJXfPVEqRGRSXrdFadBiqfui9MxSgSFTVnAw62dSpF6DGsNLQVtE5CgWi5k3uU8qAaTfk8/MFYq0+2MN2v2xBM++sJrTTl/UexBjWUtCqdWhv4JD6XzOHwjZWiyQL3itC7lSK0Oh2D1QsiN3mILDEJmB4QVx8/dTca8QkE74bzCUFxKSPd0N5QWQnm1vOWveJOZPHzfYxwdOQVtERHpJxmOMr4oxvmxsws6Jcc49fvJ7loZcoXjIIMaOXFmrQllXQ/k6my/inPezkjiH81Y4XPfxbL7nTYlSIaEz53V57G7N9lsAyeZ7d1N886OnKmiLiIgAflN6jLqjZOK50vwJpRaE6tTojCBX0BYRERlE+fwJjGJBQlPbiIiIhISCtoiISEgoaIuIiISEgraIiEhIKGiLiIiEhIK2iIhISChoi4iIhISCtoiISEgoaIuIiISEgraIiEhIKGiLiIiEhIK2iIhISChoi4iIhISCtoiISEgoaIuIiISEgraIiEhIVBS0zexSM3vDzDaZ2S39nP9/zexVM3vFzH5jZnPKzhXMbK2/PBxk4kVERKIkMdgFZhYH7gA+DDQBq83sYefcq2WX/R5Y4pxrN7MvAD8ArvLPdTjnFgWcbhERkcippKZ9NrDJObfFOZcFVgBXll/gnHvCOdfu7z4PzAo2mSIiIlJJ0J4JbC3bb/KPDeSzwKNl+xkzW2Nmz5vZx4aRRhEREaGC5vGhMLPrgCXABWWH5zjnms3sOOC3ZrbOObe5n3uXAcsA6uvraWxsDCxdra2tgT4v6pSfwVOeBk95GizlZ/CGk6eVBO1mYHbZ/iz/WC9m9iHgL4ELnHNdpePOuWZ/vcXMGoHFwCFB2zm3HFgOsGTJEtfQ0FDxlxhMY2MjQT4v6pSfwVOeBk95GizlZ/CGk6eVNI+vBk40s3lmlgKuBnqNAjezxcBdwBXOuZ1lxyeaWdrfngKcB5QPYBMREZEKDVrTds7lzewm4DEgDtztnNtgZrcBa5xzDwO3A7XA/29mAO84564ATgHuMrMiXgHhe31GnYuIiEiFKurTds6tBFb2OXZr2faHBrjvWWDhSBIoIiIiHs2IJiIiEhIK2iIiIiGhoC0iIhISCtoiIiIhoaAtIiISEgraIiIiIaGgLSIiEhIK2iIiIiGhoC0iIhISCtoiIiIhoaAtIiISEgraIiIiIaGgLSIiEhIK2iIiIiGhoC0iIhISCtoiIiIhoaAtIiISEgraIiIiIaGgLSIiEhIK2iIiIiGhoC0iIhISCtoiIiIhoaAtIiISEgraIiIiIaGgLSIiEhIVBW0zu9TM3jCzTWZ2Sz/n02Z2v3/+BTObW3bua/7xN8zskuCSLiIiEi2DBm0ziwN3AB8BTgWuMbNT+1z2WWCfc+4E4IfA9/17TwWuBk4DLgV+5D9PREREhqiSmvbZwCbn3BbnXBZYAVzZ55orgZ/62w8CF5uZ+cdXOOe6nHN/ADb5zxMREZEhqiRozwS2lu03+cf6vcY5lwcOAJMrvFdEREQqkBjtBJSY2TJgmb/bamZvBPj4KcDuAJ8XdcrP4ClPg6c8DZbyM3jleTqnkhsqCdrNwOyy/Vn+sf6uaTKzBDAe2FPhvQA455YDyytJ9FCZ2Rrn3JIj8ewoUn4GT3kaPOVpsJSfwRtOnlbSPL4aONHM5plZCm9g2cN9rnkYuN7f/mPgt8455x+/2h9dPg84EfjdUBIoIiIinkFr2s65vJndBDwGxIG7nXMbzOw2YI1z7mHgJ8DPzGwTsBcvsONf9wDwKpAHbnTOFY7QdxERERnTKurTds6tBFb2OXZr2XYn8MkB7v0O8J0RpDEIR6TZPcKUn8FTngZPeRos5Wfwhpyn5rVii4iIyNFO05iKiIiExJgO2oNNvyqDM7O7zWynma0vOzbJzFaZ2Zv+euJopjFMzGy2mT1hZq+a2QYz+4p/XHk6TGaWMbPfmdnLfp7+D//4PH9a5U3+NMup0U5rmJhZ3Mx+b2a/9veVnyNgZm+Z2TozW2tma/xjQ/67H7NBu8LpV2Vw9+BNQVvuFuA3zrkTgd/4+1KZPPBnzrlTgXOAG/1/l8rT4esCLnLOnQEsAi41s3PwplP+oT+98j686Zalcl8BXivbV36O3IXOuUVlr3kN+e9+zAZtKpt
+VQbhnHsK742AcuXT1v4U+Nh7mqgQc85tc8695G+34P1PcSbK02FznlZ/N+kvDrgIb1plUJ4OiZnNAi4HfuzvG8rPI2HIf/djOWhrCtUjp945t83f3g7Uj2Ziwsr/NbzFwAsoT0fEb8pdC+wEVgGbgf3+tMqgv/+h+jvgvwNFf38yys+RcsDjZvaiPwMoDOPv/qiZxlTCyTnnzEyvIAyRmdUC/wJ81Tl30KvIeJSnQ+fP/7DIzCYAvwTmj3KSQsvM/gjY6Zx70cwaRjs9Y8gHnHPNZjYNWGVmr5efrPTvfizXtCueQlWGbIeZzQDw1ztHOT2hYmZJvID9c+fcQ/5h5WkAnHP7gSeAc4EJ/rTKoL//oTgPuMLM3sLrVrwI+HuUnyPinGv21zvxCpZnM4y/+7EctCuZflWGp3za2uuBfxvFtISK3zf4E+A159zflp1Sng6TmU31a9iYWRXwYbyxAk/gTasMytOKOee+5pyb5Zybi/f/zd86565F+TlsZlZjZnWlbWApsJ5h/N2P6clVzOwyvL6Z0vSroz0zW+iY2X1AA96v0ewAvgH8K/AAcCzwNvCfnXN9B6tJP8zsA8DTwDp6+gu/jtevrTwdBjM7HW8QTxyvIvKAc+42MzsOr6Y4Cfg9cJ1zrmv0Uho+fvP4nzvn/kj5OXx+3v3S300Av3DOfcfMJjPEv/sxHbRFRETGkrHcPC4iIjKmKGiLiIiEhIK2iIhISChoi4iIhISCtoiISEgoaIuIiISEgraIiEhIKGiLiIiExP8F2aOWAA3Dno4AAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "def plot_learning_curves(history):\n", - " pd.DataFrame(history.history).plot(figsize=(8, 5))\n", - " plt.grid(True)\n", - " plt.gca().set_ylim(0, 2)\n", - " plt.show()\n", - "plot_learning_curves(history)" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "0.43126633763313293" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "model.evaluate(x_test_scaled, y_test, verbose=0)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.9" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/tf13_keras_regression-hp-search-sklearn.ipynb b/tf13_keras_regression-hp-search-sklearn.ipynb deleted file mode 100644 index 8054920..0000000 --- a/tf13_keras_regression-hp-search-sklearn.ipynb +++ /dev/null @@ -1,4958 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2.2.0\n", - "sys.version_info(major=3, minor=6, micro=9, releaselevel='final', serial=0)\n", - "matplotlib 3.2.1\n", - "numpy 1.18.5\n", - "pandas 1.0.4\n", - "sklearn 0.23.1\n", - "tensorflow 2.2.0\n", - "tensorflow.keras 2.3.0-tf\n" - ] - } - ], - "source": [ - "import matplotlib as mpl\n", - "import matplotlib.pyplot as plt\n", - "%matplotlib inline\n", - "import numpy as np\n", - "import sklearn\n", - "import pandas as pd\n", - "import os\n", - "import sys\n", - "import time\n", - "import tensorflow as tf\n", - "\n", - "from tensorflow import keras\n", - "\n", - "print(tf.__version__)\n", - "print(sys.version_info)\n", - "for module in mpl, np, pd, sklearn, tf, keras:\n", - " print(module.__name__, module.__version__)" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - ".. _california_housing_dataset:\n", - "\n", - "California Housing dataset\n", - "--------------------------\n", - "\n", - "**Data Set Characteristics:**\n", - "\n", - " :Number of Instances: 20640\n", - "\n", - " :Number of Attributes: 8 numeric, predictive attributes and the target\n", - "\n", - " :Attribute Information:\n", - " - MedInc median income in block\n", - " - HouseAge median house age in block\n", - " - AveRooms average number of rooms\n", - " - AveBedrms average number of bedrooms\n", - " - Population block population\n", - " - AveOccup average house occupancy\n", - " - Latitude house block latitude\n", - " - Longitude house block longitude\n", - "\n", - " :Missing Attribute Values: None\n", - "\n", - "This dataset was obtained from the StatLib repository.\n", - "http://lib.stat.cmu.edu/datasets/\n", - "\n", - "The target variable is the median house value for California districts.\n", - "\n", - "This dataset was derived from the 1990 U.S. census, using one row per census\n", - "block group. 
A block group is the smallest geographical unit for which the U.S.\n", - "Census Bureau publishes sample data (a block group typically has a population\n", - "of 600 to 3,000 people).\n", - "\n", - "It can be downloaded/loaded using the\n", - ":func:`sklearn.datasets.fetch_california_housing` function.\n", - "\n", - ".. topic:: References\n", - "\n", - " - Pace, R. Kelley and Ronald Barry, Sparse Spatial Autoregressions,\n", - " Statistics and Probability Letters, 33 (1997) 291-297\n", - "\n", - "(20640, 8)\n", - "(20640,)\n" - ] - } - ], - "source": [ - "from sklearn.datasets import fetch_california_housing\n", - "\n", - "housing = fetch_california_housing()\n", - "print(housing.DESCR)\n", - "print(housing.data.shape)\n", - "print(housing.target.shape)" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(11610, 8) (11610,)\n", - "(3870, 8) (3870,)\n", - "(5160, 8) (5160,)\n" - ] - } - ], - "source": [ - "from sklearn.model_selection import train_test_split\n", - "\n", - "x_train_all, x_test, y_train_all, y_test = train_test_split(\n", - " housing.data, housing.target, random_state = 7)\n", - "x_train, x_valid, y_train, y_valid = train_test_split(\n", - " x_train_all, y_train_all, random_state = 11)\n", - "print(x_train.shape, y_train.shape)\n", - "print(x_valid.shape, y_valid.shape)\n", - "print(x_test.shape, y_test.shape)\n" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from sklearn.preprocessing import StandardScaler\n", - "\n", - "scaler = StandardScaler()\n", - "x_train_scaled = scaler.fit_transform(x_train)\n", - "x_valid_scaled = scaler.transform(x_valid)\n", - "x_test_scaled = scaler.transform(x_test)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 1/10\n", - "363/363 [==============================] - 1s 2ms/step - loss: 1.3733 - val_loss: 0.7411\n", - "Epoch 2/10\n", - "363/363 [==============================] - 1s 3ms/step - loss: 0.6284 - val_loss: 0.6279\n", - "Epoch 3/10\n", - "363/363 [==============================] - 1s 3ms/step - loss: 0.5568 - val_loss: 0.5713\n", - "Epoch 4/10\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.5257 - val_loss: 0.5383\n", - "Epoch 5/10\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4986 - val_loss: 0.5133\n", - "Epoch 6/10\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4869 - val_loss: 0.5010\n", - "Epoch 7/10\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4706 - val_loss: 0.4821\n", - "Epoch 8/10\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4531 - val_loss: 0.4714\n", - "Epoch 9/10\n", - "363/363 [==============================] - 1s 2ms/step - loss: 0.4455 - val_loss: 0.4670\n", - "Epoch 10/10\n", - "363/363 [==============================] - 1s 3ms/step - loss: 0.4368 - val_loss: 0.4615\n" - ] - } - ], - "source": [ - "# RandomizedSearchCV\n", - "# 1. Since this is a sklearn interface, convert the Keras model into a sklearn model\n", - "# 2. Define the parameter space\n", - "# 3. 
Search the parameters\n", - "\n", - "def build_model(hidden_layers = 1,\n", - " layer_size = 30,\n", - " learning_rate = 3e-3):\n", - " model = keras.models.Sequential()\n", - " # The first layer needs an explicit input_shape, so it is added outside the for loop; inside the loop each layer's output becomes the next layer's input\n", - " model.add(keras.layers.Dense(layer_size, activation='relu',\n", - " input_shape=x_train.shape[1:]))\n", - " for _ in range(hidden_layers - 1):\n", - " model.add(keras.layers.Dense(layer_size,\n", - " activation = 'relu'))\n", - " model.add(keras.layers.Dense(1))\n", - " optimizer = keras.optimizers.SGD(learning_rate)\n", - " model.compile(loss = 'mse', optimizer = optimizer)\n", - " return model\n", - "\n", - "# KerasRegressor returns a sklearn-style model; build_fn is a callback that builds the Keras model\n", - "sklearn_model = tf.keras.wrappers.scikit_learn.KerasRegressor(\n", - " build_fn = build_model)\n", - "callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]\n", - "# The fit below is just a quick sanity test of the sklearn-wrapped TF model\n", - "history = sklearn_model.fit(x_train_scaled, y_train,\n", - " epochs = 10,\n", - " validation_data = (x_valid_scaled, y_valid),\n", - " callbacks = callbacks)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "(base64-encoded PNG omitted: training/validation loss curves from plot_learning_curves)\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, - "output_type": "display_data" - } - ], - "source": [ - "def plot_learning_curves(history):\n", - " pd.DataFrame(history.history).plot(figsize=(8, 5))\n", - " plt.grid(True)\n", - " plt.gca().set_ylim(0, 1)\n", - " plt.show()\n", - "plot_learning_curves(history)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 1/5\n", - "291/291 [==============================] - 2s 6ms/step - loss: 4.8169 - val_loss: 4.5730\n", - "Epoch 2/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 4.1151 - val_loss: 3.9278\n", - "Epoch 3/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 3.5486 - val_loss: 3.4186\n", - "Epoch 4/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 3.0920 - val_loss: 3.0067\n", - "Epoch 5/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 2.7177 - val_loss: 2.6650\n", - "73/73 [==============================] - 0s 1ms/step - loss: 2.2921\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 5.0068 - val_loss: 4.7979\n", - "Epoch 2/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 4.1171 - val_loss: 4.0133\n", - "Epoch 3/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 3.4675 - val_loss: 3.4350\n", - "Epoch 4/5\n", - "291/291 [==============================] - 1s 4ms/step - loss: 2.9901 - val_loss: 3.0049\n", - "Epoch 5/5\n", - "291/291 [==============================] - 1s 5ms/step - loss: 2.6353 - val_loss: 2.6800\n", - "73/73 [==============================] - 0s 2ms/step - loss: 2.5213\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 9.3036 - val_loss: 8.4139\n", - "Epoch 2/5\n", - "291/291 [==============================] - 1s 3ms/step - loss: 7.2816 - val_loss: 6.7408\n", - "Epoch 3/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 5.8961 - val_loss: 5.5513\n", - "Epoch 4/5\n", - "291/291 [==============================] - 1s 3ms/step - loss: 4.8929 - val_loss: 4.6717\n", - "Epoch 5/5\n", - "291/291 [==============================] - 1s 3ms/step - loss: 4.1412 - val_loss: 4.0009\n", - "73/73 [==============================] - 0s 1ms/step - loss: 3.7795\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 5.4101 - val_loss: 4.9982\n", - "Epoch 2/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 4.4919 - val_loss: 4.2074\n", - "Epoch 3/5\n", - "291/291 [==============================] - 1s 3ms/step - loss: 3.8180 - val_loss: 3.6181\n", - "Epoch 4/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 3.3089 - val_loss: 3.1706\n", - "Epoch 5/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 2.9186 - val_loss: 2.8252\n", - "73/73 [==============================] - 0s 1ms/step - loss: 2.6951\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 3ms/step - loss: 5.8741 - val_loss: 5.5725\n", - "Epoch 2/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 4.7093 - val_loss: 4.5407\n", - "Epoch 3/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 3.8526 - val_loss: 3.7603\n", - "Epoch 4/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 3.2003 - val_loss: 3.1531\n", - "Epoch 5/5\n", - "291/291 
[==============================] - 1s 2ms/step - loss: 2.6945 - val_loss: 2.6759\n", - "73/73 [==============================] - 0s 1ms/step - loss: 2.5566\n", - "...(remaining per-epoch Keras training output omitted: dozens of further 5-epoch runs, each logging a decreasing loss/val_loss per epoch followed by a single 73-batch test evaluation, with final test losses mostly between roughly 0.39 and 4.9)...\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 3ms/step - loss: 5.1593 - val_loss: 5.2022\n", - "Epoch 2/5\n", - "291/291 [==============================] - 
1s 3ms/step - loss: 4.8037 - val_loss: 4.8456\n", - "Epoch 3/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 4.4718 - val_loss: 4.5103\n", - "Epoch 4/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 4.1593 - val_loss: 4.1932\n", - "Epoch 5/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 3.8634 - val_loss: 3.8906\n", - "73/73 [==============================] - 0s 1ms/step - loss: 3.8143\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 2.6233 - val_loss: 1.5174\n", - "Epoch 2/5\n", - "291/291 [==============================] - 1s 3ms/step - loss: 1.1822 - val_loss: 1.1096\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 0.9414 - val_loss: 0.9299\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 0.8175 - val_loss: 0.8322\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 0.7501 - val_loss: 0.7778\n", - "73/73 [==============================] - 0s 1ms/step - loss: 0.6963\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 4.1645 - val_loss: 2.6877\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 1.9671 - val_loss: 1.7414\n", - "Epoch 3/5\n", - "291/291 [==============================] - 1s 3ms/step - loss: 1.4915 - val_loss: 1.4736\n", - "Epoch 4/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 1.3302 - val_loss: 1.3518\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 1.2437 - val_loss: 1.2727\n", - "73/73 [==============================] - 0s 1ms/step - loss: 1.2184\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.2925 - val_loss: 1.8886\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.3801 - val_loss: 1.2669\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0121 - val_loss: 0.9720\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8040 - val_loss: 0.7939\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6898 - val_loss: 0.7023\n", - "73/73 [==============================] - 0s 741us/step - loss: 0.6630\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.1169 - val_loss: 1.2152\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9299 - val_loss: 0.8905\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7871 - val_loss: 0.7966\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7281 - val_loss: 0.7472\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6913 - val_loss: 0.7120\n", - "73/73 [==============================] - 0s 998us/step - loss: 0.7615\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.4891 - val_loss: 1.2539\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9480 - val_loss: 0.8004\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7368 - val_loss: 0.7300\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6747 - val_loss: 0.6932\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step 
- loss: 0.6403 - val_loss: 0.6685\n", - "73/73 [==============================] - 0s 887us/step - loss: 0.6678\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.4230 - val_loss: 0.9523\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7410 - val_loss: 0.6910\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6234 - val_loss: 0.6393\n", - "Epoch 4/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 0.5747 - val_loss: 0.5845\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5367 - val_loss: 0.5476\n", - "73/73 [==============================] - 0s 890us/step - loss: 0.4781\n", - "Epoch 1/5\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "291/291 [==============================] - 0s 1ms/step - loss: 1.0420 - val_loss: 0.6764\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6167 - val_loss: 0.5992\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5512 - val_loss: 0.5490\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5034 - val_loss: 0.5085\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4736 - val_loss: 0.4957\n", - "73/73 [==============================] - 0s 776us/step - loss: 0.4760\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.0855 - val_loss: 1.4162\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.2415 - val_loss: 1.2320\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0421 - val_loss: 1.0364\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8814 - val_loss: 0.8794\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7793 - val_loss: 0.7572\n", - "73/73 [==============================] - 0s 731us/step - loss: 0.7009\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0203 - val_loss: 0.7237\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6118 - val_loss: 0.6342\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5551 - val_loss: 0.5826\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5133 - val_loss: 0.5398\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4800 - val_loss: 0.5091\n", - "73/73 [==============================] - 0s 902us/step - loss: 0.4912\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.8002 - val_loss: 1.0841\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8644 - val_loss: 0.8159\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7184 - val_loss: 0.7334\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6618 - val_loss: 0.6931\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6270 - val_loss: 0.6700\n", - "73/73 [==============================] - 0s 741us/step - loss: 0.6656\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9480 - val_loss: 0.6132\n", - "Epoch 2/5\n", - "291/291 
[==============================] - 0s 1ms/step - loss: 0.5560 - val_loss: 0.5532\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5083 - val_loss: 0.5110\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4791 - val_loss: 0.4829\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4592 - val_loss: 0.4626\n", - "73/73 [==============================] - 0s 776us/step - loss: 0.4163\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1311 - val_loss: 0.6862\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6238 - val_loss: 0.6058\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5496 - val_loss: 0.5516\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5094 - val_loss: 0.5190\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4807 - val_loss: 0.4944\n", - "73/73 [==============================] - 0s 750us/step - loss: 0.4804\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1722 - val_loss: 0.6078\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5294 - val_loss: 0.5360\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4759 - val_loss: 0.4791\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4560 - val_loss: 0.4741\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4444 - val_loss: 0.4566\n", - "73/73 [==============================] - 0s 769us/step - loss: 0.4447\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.3475 - val_loss: 0.9600\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8352 - val_loss: 0.8075\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7201 - val_loss: 0.7440\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6635 - val_loss: 0.6942\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6155 - val_loss: 0.6405\n", - "73/73 [==============================] - 0s 701us/step - loss: 0.6084\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8615 - val_loss: 0.6151\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5342 - val_loss: 0.5509\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4822 - val_loss: 0.4939\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4543 - val_loss: 0.4626\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4327 - val_loss: 0.4645\n", - "73/73 [==============================] - 0s 776us/step - loss: 0.4588\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 6.1586 - val_loss: 5.3790\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.5237 - val_loss: 4.0776\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.5179 - val_loss: 3.2189\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.8315 - val_loss: 2.6213\n", - "Epoch 5/5\n", - "291/291 
[==============================] - 0s 1ms/step - loss: 2.3441 - val_loss: 2.1926\n", - "73/73 [==============================] - 0s 705us/step - loss: 1.9314\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 7.8600 - val_loss: 6.4607\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.5827 - val_loss: 4.9671\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.3741 - val_loss: 3.9834\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.5144 - val_loss: 3.2495\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.8621 - val_loss: 2.6708\n", - "73/73 [==============================] - 0s 712us/step - loss: 2.6098\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 7.8475 - val_loss: 6.9182\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.6535 - val_loss: 5.2722\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4071 - val_loss: 4.2072\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.5414 - val_loss: 3.4290\n", - "Epoch 5/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 2.8906 - val_loss: 2.8284\n", - "73/73 [==============================] - 0s 734us/step - loss: 2.6736\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 7.7978 - val_loss: 6.8712\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.4940 - val_loss: 5.0133\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2015 - val_loss: 3.9064\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.3682 - val_loss: 3.1955\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.8020 - val_loss: 2.7297\n", - "73/73 [==============================] - 0s 743us/step - loss: 2.4731\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.0610 - val_loss: 4.8365\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2445 - val_loss: 4.0615\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.5845 - val_loss: 3.4273\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.0383 - val_loss: 2.8955\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.5821 - val_loss: 2.4540\n", - "73/73 [==============================] - 0s 795us/step - loss: 2.4359\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.0115 - val_loss: 4.9367\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.5337 - val_loss: 4.4908\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.1286 - val_loss: 4.1078\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.7801 - val_loss: 3.7770\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.4782 - val_loss: 3.4883\n", - "73/73 [==============================] - 0s 759us/step - loss: 3.2049\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.3891 - val_loss: 
4.3838\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.9677 - val_loss: 3.9649\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.5987 - val_loss: 3.5919\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.2732 - val_loss: 3.2638\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.9879 - val_loss: 2.9737\n", - "73/73 [==============================] - 0s 756us/step - loss: 2.8622\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.5032 - val_loss: 5.3251\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.7847 - val_loss: 4.6464\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2243 - val_loss: 4.1080\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.7831 - val_loss: 3.6825\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.4300 - val_loss: 3.3364\n", - "73/73 [==============================] - 0s 741us/step - loss: 3.2063\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.3948 - val_loss: 4.2735\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.8824 - val_loss: 3.7927\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.4405 - val_loss: 3.3761\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.0552 - val_loss: 3.0141\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.7174 - val_loss: 2.6972\n", - "73/73 [==============================] - 0s 745us/step - loss: 2.5201\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.9653 - val_loss: 5.8882\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.2793 - val_loss: 5.2471\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.7084 - val_loss: 4.7055\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2204 - val_loss: 4.2358\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.7954 - val_loss: 3.8254\n", - "73/73 [==============================] - 0s 749us/step - loss: 3.7291\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.9685 - val_loss: 1.1124\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9086 - val_loss: 0.8290\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7516 - val_loss: 0.7621\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6977 - val_loss: 0.7265\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6659 - val_loss: 0.6921\n", - "73/73 [==============================] - 0s 742us/step - loss: 0.6005\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.1963 - val_loss: 1.5580\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0259 - val_loss: 0.8754\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7683 - val_loss: 0.7825\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7145 - val_loss: 
0.7427\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6839 - val_loss: 0.7145\n", - "73/73 [==============================] - 0s 745us/step - loss: 0.6608\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.3852 - val_loss: 1.2502\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9576 - val_loss: 0.8568\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7460 - val_loss: 0.7411\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6740 - val_loss: 0.6844\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6355 - val_loss: 0.6534\n", - "73/73 [==============================] - 0s 678us/step - loss: 0.6334\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.1238 - val_loss: 1.2594\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0309 - val_loss: 0.9895\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8577 - val_loss: 0.8801\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7830 - val_loss: 0.8265\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7413 - val_loss: 0.7910\n", - "73/73 [==============================] - 0s 755us/step - loss: 0.7701\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.5229 - val_loss: 1.3573\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1336 - val_loss: 0.9946\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8970 - val_loss: 0.8638\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7955 - val_loss: 0.8096\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7456 - val_loss: 0.7789\n", - "73/73 [==============================] - 0s 773us/step - loss: 0.7643\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.2120 - val_loss: 0.7290\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6061 - val_loss: 0.5779\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5221 - val_loss: 0.5214\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4830 - val_loss: 0.4880\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4586 - val_loss: 0.4659\n", - "73/73 [==============================] - 0s 794us/step - loss: 0.4235\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.2511 - val_loss: 0.9892\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8195 - val_loss: 0.6624\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5581 - val_loss: 0.5669\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5078 - val_loss: 0.5251\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4784 - val_loss: 0.5039\n", - "73/73 [==============================] - 0s 782us/step - loss: 0.4810\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9067 - val_loss: 0.6472\n", - "Epoch 2/5\n", - "291/291 
[==============================] - 0s 1ms/step - loss: 0.5555 - val_loss: 0.5485\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4997 - val_loss: 0.5108\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4699 - val_loss: 0.4810\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4597 - val_loss: 0.4688\n", - "73/73 [==============================] - 0s 773us/step - loss: 0.4569\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.2706 - val_loss: 0.6720\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5706 - val_loss: 0.5938\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5258 - val_loss: 0.5550\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4977 - val_loss: 0.5244\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4749 - val_loss: 0.5032\n", - "73/73 [==============================] - 0s 737us/step - loss: 0.5040\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.2697 - val_loss: 0.6926\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5834 - val_loss: 0.6005\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5354 - val_loss: 0.5558\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5024 - val_loss: 0.5205\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4770 - val_loss: 0.4957\n", - "73/73 [==============================] - 0s 813us/step - loss: 0.4989\n", - "Epoch 1/5\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "291/291 [==============================] - 0s 1ms/step - loss: 0.8114 - val_loss: 0.5644\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4998 - val_loss: 0.4941\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4645 - val_loss: 0.4577\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4620 - val_loss: 0.4887\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5371 - val_loss: 0.4676\n", - "73/73 [==============================] - 0s 785us/step - loss: 0.4544\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9807 - val_loss: 0.6228\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5320 - val_loss: 0.5100\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4643 - val_loss: 0.4632\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4374 - val_loss: 0.4405\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4216 - val_loss: 0.4283\n", - "73/73 [==============================] - 0s 795us/step - loss: 0.4126\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.3037 - val_loss: 0.7738\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5818 - val_loss: 0.5106\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4501 - val_loss: 0.4415\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step 
- loss: 0.4195 - val_loss: 0.4227\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4094 - val_loss: 0.4200\n", - "73/73 [==============================] - 0s 742us/step - loss: 0.4179\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7948 - val_loss: 0.6184\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5231 - val_loss: 0.5104\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4645 - val_loss: 0.4827\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4427 - val_loss: 0.4622\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4295 - val_loss: 0.4479\n", - "73/73 [==============================] - 0s 797us/step - loss: 0.4482\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8815 - val_loss: 1.8018\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.1401 - val_loss: 1.0293\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8023 - val_loss: 0.7424\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6288 - val_loss: 0.6142\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5315 - val_loss: 0.5409\n", - "73/73 [==============================] - 0s 770us/step - loss: 0.5320\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.0118 - val_loss: 4.6170\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.9815 - val_loss: 3.6953\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.1890 - val_loss: 2.9776\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.5746 - val_loss: 2.4242\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.1076 - val_loss: 2.0075\n", - "73/73 [==============================] - 0s 707us/step - loss: 1.8704\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.9963 - val_loss: 4.3942\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.5796 - val_loss: 3.2556\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.7069 - val_loss: 2.5413\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.1482 - val_loss: 2.0784\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.7759 - val_loss: 1.7648\n", - "73/73 [==============================] - 0s 748us/step - loss: 1.6183\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.7519 - val_loss: 4.0855\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.2999 - val_loss: 2.9238\n", - "Epoch 3/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 2.4370 - val_loss: 2.2299\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.9157 - val_loss: 1.8096\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.5931 - val_loss: 1.5461\n", - "73/73 [==============================] - 0s 748us/step - loss: 1.4312\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.9929 - val_loss: 4.6813\n", - 
"Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.0927 - val_loss: 3.9001\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.4243 - val_loss: 3.3209\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.9245 - val_loss: 2.8990\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.5587 - val_loss: 2.5942\n", - "73/73 [==============================] - 0s 747us/step - loss: 2.6023\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.2935 - val_loss: 4.8570\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.1148 - val_loss: 3.7904\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.2019 - val_loss: 2.9567\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.4980 - val_loss: 2.3245\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.9810 - val_loss: 1.8730\n", - "73/73 [==============================] - 0s 739us/step - loss: 1.8853\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.9591 - val_loss: 4.7765\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.3151 - val_loss: 4.1725\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.7881 - val_loss: 3.6747\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.3531 - val_loss: 3.2628\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.9913 - val_loss: 2.9188\n", - "73/73 [==============================] - 0s 733us/step - loss: 2.9332\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.1775 - val_loss: 5.0870\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.6242 - val_loss: 4.5608\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.1376 - val_loss: 4.0952\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.7064 - val_loss: 3.6831\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.3243 - val_loss: 3.3188\n", - "73/73 [==============================] - 0s 779us/step - loss: 3.2203\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 6.4158 - val_loss: 5.9867\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.1875 - val_loss: 4.9300\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.3307 - val_loss: 4.1768\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.7101 - val_loss: 3.6235\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.2461 - val_loss: 3.2031\n", - "73/73 [==============================] - 0s 775us/step - loss: 3.0986\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4729 - val_loss: 4.2768\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.8722 - val_loss: 3.7205\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.3785 - val_loss: 3.2665\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.9731 - val_loss: 2.8924\n", - 
"Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.6386 - val_loss: 2.5875\n", - "73/73 [==============================] - 0s 740us/step - loss: 2.6656\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4344 - val_loss: 4.4528\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.9696 - val_loss: 4.0036\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.5653 - val_loss: 3.6109\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.2116 - val_loss: 3.2672\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.9053 - val_loss: 2.9670\n", - "73/73 [==============================] - 0s 734us/step - loss: 2.7470\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.3381 - val_loss: 1.1153\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8662 - val_loss: 0.8397\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7522 - val_loss: 0.7736\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7131 - val_loss: 0.7398\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6854 - val_loss: 0.7113\n", - "73/73 [==============================] - 0s 742us/step - loss: 0.6311\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.2561 - val_loss: 1.0241\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8306 - val_loss: 0.8050\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7231 - val_loss: 0.7439\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6818 - val_loss: 0.7067\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6502 - val_loss: 0.6766\n", - "73/73 [==============================] - 0s 742us/step - loss: 0.6316\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.4945 - val_loss: 1.4076\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9687 - val_loss: 0.8534\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7364 - val_loss: 0.7533\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6875 - val_loss: 0.7237\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6646 - val_loss: 0.7047\n", - "73/73 [==============================] - 0s 723us/step - loss: 0.6433\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.1617 - val_loss: 1.3695\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0031 - val_loss: 0.9240\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7751 - val_loss: 0.7954\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7037 - val_loss: 0.7504\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6678 - val_loss: 0.7184\n", - "73/73 [==============================] - 0s 743us/step - loss: 0.6919\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 
2.3779 - val_loss: 1.0763\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8187 - val_loss: 0.7963\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6833 - val_loss: 0.7262\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6367 - val_loss: 0.6852\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6085 - val_loss: 0.6579\n", - "73/73 [==============================] - 0s 725us/step - loss: 0.6309\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 1.0614 - val_loss: 0.6549\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5763 - val_loss: 0.5728\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5164 - val_loss: 0.5159\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4774 - val_loss: 0.4805\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4504 - val_loss: 0.4548\n", - "73/73 [==============================] - 0s 711us/step - loss: 0.4067\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0348 - val_loss: 0.6440\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5472 - val_loss: 0.5333\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4788 - val_loss: 0.4913\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4522 - val_loss: 0.4811\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4368 - val_loss: 0.4566\n", - "73/73 [==============================] - 0s 742us/step - loss: 0.4366\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1848 - val_loss: 0.6867\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6337 - val_loss: 0.5823\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5195 - val_loss: 0.5370\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4860 - val_loss: 0.5067\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4633 - val_loss: 0.4842\n", - "73/73 [==============================] - 0s 769us/step - loss: 0.4617\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1639 - val_loss: 0.7197\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6331 - val_loss: 0.6613\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6715 - val_loss: 0.5386\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4791 - val_loss: 0.5033\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4572 - val_loss: 0.4787\n", - "73/73 [==============================] - 0s 785us/step - loss: 0.4762\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0270 - val_loss: 0.6476\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5398 - val_loss: 0.5397\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4812 - val_loss: 0.4949\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 
0.4523 - val_loss: 0.4699\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4380 - val_loss: 0.4514\n", - "73/73 [==============================] - 0s 756us/step - loss: 0.4447\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7495 - val_loss: 0.6997\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5457 - val_loss: 0.5050\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4546 - val_loss: 0.4584\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4297 - val_loss: 0.4342\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4084 - val_loss: 0.4056\n", - "73/73 [==============================] - 0s 755us/step - loss: 0.3613\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9667 - val_loss: 0.5910\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5323 - val_loss: 0.5016\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4581 - val_loss: 0.4575\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4247 - val_loss: 0.4287\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4072 - val_loss: 0.4158\n", - "73/73 [==============================] - 0s 777us/step - loss: 0.4019\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9655 - val_loss: 0.5448\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4861 - val_loss: 0.4841\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4452 - val_loss: 0.4581\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4193 - val_loss: 0.4270\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4039 - val_loss: 0.4250\n", - "73/73 [==============================] - 0s 784us/step - loss: 0.4071\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8438 - val_loss: 0.6067\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6505 - val_loss: 0.6540\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6769 - val_loss: 0.5327\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4426 - val_loss: 0.4589\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4146 - val_loss: 0.4284\n", - "73/73 [==============================] - 0s 761us/step - loss: 0.4282\n", - "Epoch 1/5\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "291/291 [==============================] - 0s 1ms/step - loss: 0.7299 - val_loss: 0.5967\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4993 - val_loss: 0.5064\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4567 - val_loss: 0.4636\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4197 - val_loss: 0.4369\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4032 - val_loss: 0.4196\n", - "73/73 [==============================] - 0s 728us/step - loss: 0.4158\n", - "Epoch 1/5\n", - "291/291 
[==============================] - 0s 1ms/step - loss: 5.6228 - val_loss: 5.0109\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2094 - val_loss: 3.8786\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.2886 - val_loss: 3.1267\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.6749 - val_loss: 2.6246\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.2651 - val_loss: 2.2845\n", - "73/73 [==============================] - 0s 748us/step - loss: 2.0913\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.2815 - val_loss: 4.8232\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.9833 - val_loss: 3.7745\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.1174 - val_loss: 3.0497\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.5106 - val_loss: 2.5315\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.0805 - val_loss: 2.1548\n", - "73/73 [==============================] - 0s 790us/step - loss: 1.8916\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.9459 - val_loss: 3.4256\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.7096 - val_loss: 2.4190\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.9359 - val_loss: 1.7957\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.4690 - val_loss: 1.4218\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1965 - val_loss: 1.1990\n", - "73/73 [==============================] - 0s 671us/step - loss: 1.0999\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.3947 - val_loss: 3.7929\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.1948 - val_loss: 2.7643\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.3949 - val_loss: 2.1030\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.8832 - val_loss: 1.6969\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.5645 - val_loss: 1.4547\n", - "73/73 [==============================] - 0s 741us/step - loss: 1.6620\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.3166 - val_loss: 3.7630\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.1182 - val_loss: 2.7475\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.3206 - val_loss: 2.0768\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.8004 - val_loss: 1.6480\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.4666 - val_loss: 1.3745\n", - "73/73 [==============================] - 0s 777us/step - loss: 1.3071\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 6.1250 - val_loss: 5.8585\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.2140 - val_loss: 5.0181\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4914 - val_loss: 4.3381\n", - "Epoch 4/5\n", - "291/291 
[==============================] - 0s 1ms/step - loss: 3.9010 - val_loss: 3.7762\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.4132 - val_loss: 3.3111\n", - "73/73 [==============================] - 0s 715us/step - loss: 3.0906\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.1080 - val_loss: 4.9837\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4769 - val_loss: 4.4024\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.9592 - val_loss: 3.9200\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.5278 - val_loss: 3.5171\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.1632 - val_loss: 3.1757\n", - "73/73 [==============================] - 0s 842us/step - loss: 2.9912\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.4720 - val_loss: 5.3090\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.6994 - val_loss: 4.6036\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.0962 - val_loss: 4.0425\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.6100 - val_loss: 3.5849\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.2125 - val_loss: 3.2082\n", - "73/73 [==============================] - 0s 753us/step - loss: 3.0407\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4942 - val_loss: 4.3314\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.7624 - val_loss: 3.7059\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.1912 - val_loss: 3.2194\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.7433 - val_loss: 2.8403\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.3943 - val_loss: 2.5434\n", - "73/73 [==============================] - 0s 754us/step - loss: 2.7792\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.9944 - val_loss: 4.9636\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4056 - val_loss: 4.3921\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.9035 - val_loss: 3.8990\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.4698 - val_loss: 3.4700\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.0928 - val_loss: 3.0957\n", - "73/73 [==============================] - 0s 765us/step - loss: 3.0252\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.0023 - val_loss: 0.9533\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8033 - val_loss: 0.7938\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7195 - val_loss: 0.7491\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6856 - val_loss: 0.7212\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6615 - val_loss: 0.6965\n", - "73/73 [==============================] - 0s 744us/step - loss: 0.6045\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 
1ms/step - loss: 2.2450 - val_loss: 0.9491\n",
- "[... output truncated: many repeated 5-epoch Keras fit/evaluate runs (291 training steps and 73 evaluation steps each); train and validation losses generally decrease within each run, and final evaluation losses range from roughly 0.36 to 4.5 ...]\n",
- "291/291 [==============================] - 0s 1ms/step - loss: 0.4183 - val_loss: 
0.4295\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4007 - val_loss: 0.4090\n", - "73/73 [==============================] - 0s 798us/step - loss: 0.3579\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1188 - val_loss: 1.0100\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1253 - val_loss: 0.4988\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5964 - val_loss: 0.4494\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4318 - val_loss: 0.4240\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.3866 - val_loss: 0.4208\n", - "73/73 [==============================] - 0s 799us/step - loss: 0.3965\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0682 - val_loss: 0.6272\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5248 - val_loss: 0.5101\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4620 - val_loss: 0.4543\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4179 - val_loss: 0.4229\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4049 - val_loss: 0.4211\n", - "73/73 [==============================] - 0s 785us/step - loss: 0.4030\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1009 - val_loss: 0.7491\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9054 - val_loss: 0.5879\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5744 - val_loss: 1.0128\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.2589 - val_loss: 0.4474\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4114 - val_loss: 0.4455\n", - "73/73 [==============================] - 0s 904us/step - loss: 0.4309\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8623 - val_loss: 0.6990\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6174 - val_loss: 0.4897\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4349 - val_loss: 0.4553\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 0.4001 - val_loss: 0.4073\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.3823 - val_loss: 0.3919\n", - "73/73 [==============================] - 0s 874us/step - loss: 0.4008\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 6.3013 - val_loss: 5.7589\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.1249 - val_loss: 4.9927\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4889 - val_loss: 4.4043\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.9310 - val_loss: 3.8428\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.3990 - val_loss: 3.3286\n", - "73/73 [==============================] - 0s 872us/step - loss: 3.0450\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 5.6495 - val_loss: 5.5200\n", - "Epoch 2/5\n", - "291/291 
[==============================] - 0s 1ms/step - loss: 4.9507 - val_loss: 4.9279\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4357 - val_loss: 4.4535\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.0094 - val_loss: 4.0501\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.6432 - val_loss: 3.6985\n", - "73/73 [==============================] - 0s 793us/step - loss: 3.5603\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 5.3373 - val_loss: 5.1403\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4719 - val_loss: 4.3037\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.6983 - val_loss: 3.5647\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.0572 - val_loss: 2.9824\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.5636 - val_loss: 2.5391\n", - "73/73 [==============================] - 0s 895us/step - loss: 2.4208\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 6.4834 - val_loss: 5.6793\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.8349 - val_loss: 4.5092\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.9290 - val_loss: 3.7470\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.2870 - val_loss: 3.1649\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.7816 - val_loss: 2.6934\n", - "73/73 [==============================] - 0s 844us/step - loss: 2.4276\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.3311 - val_loss: 5.3507\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.8511 - val_loss: 4.9022\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4468 - val_loss: 4.5162\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.0957 - val_loss: 4.1770\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.7860 - val_loss: 3.8764\n", - "73/73 [==============================] - 0s 821us/step - loss: 3.7849\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 5.3911 - val_loss: 5.3873\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 4.9861 - val_loss: 4.9873\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.6055 - val_loss: 4.6125\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2501 - val_loss: 4.2620\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.9193 - val_loss: 3.9388\n", - "73/73 [==============================] - 0s 732us/step - loss: 3.6123\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 3ms/step - loss: 5.5293 - val_loss: 5.5240\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.0536 - val_loss: 5.1001\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.6797 - val_loss: 4.7466\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step 
- loss: 4.3571 - val_loss: 4.4317\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.0642 - val_loss: 4.1392\n", - "73/73 [==============================] - 0s 888us/step - loss: 4.0325\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.1841 - val_loss: 5.1848\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.6986 - val_loss: 4.7079\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2596 - val_loss: 4.2800\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.8662 - val_loss: 3.8965\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.5141 - val_loss: 3.5534\n", - "73/73 [==============================] - 0s 960us/step - loss: 3.4113\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.7287 - val_loss: 5.6310\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.1762 - val_loss: 5.1137\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.7019 - val_loss: 4.6683\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2917 - val_loss: 4.2787\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.9323 - val_loss: 3.9353\n", - "73/73 [==============================] - 0s 808us/step - loss: 3.6054\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 5.5706 - val_loss: 5.6552\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.1842 - val_loss: 5.2904\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.8508 - val_loss: 4.9642\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.5446 - val_loss: 4.6556\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2481 - val_loss: 4.3469\n", - "73/73 [==============================] - 0s 899us/step - loss: 4.2459\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 2.7815 - val_loss: 1.3474\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1558 - val_loss: 1.0436\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9509 - val_loss: 0.9115\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8164 - val_loss: 0.8253\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7588 - val_loss: 0.7850\n", - "73/73 [==============================] - 0s 921us/step - loss: 0.7184\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.7473 - val_loss: 2.0229\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.3204 - val_loss: 1.1725\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0402 - val_loss: 1.0226\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9047 - val_loss: 0.8877\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7979 - val_loss: 0.8043\n", - "73/73 [==============================] - 0s 788us/step - loss: 0.7757\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.2130 - val_loss: 1.2914\n", - 
"Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1067 - val_loss: 1.0467\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9310 - val_loss: 0.9050\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8237 - val_loss: 0.8164\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7445 - val_loss: 0.7477\n", - "73/73 [==============================] - 0s 806us/step - loss: 0.7259\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.8627 - val_loss: 2.7832\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.1044 - val_loss: 1.8399\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.5691 - val_loss: 1.5350\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.4011 - val_loss: 1.4344\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.3485 - val_loss: 1.3994\n", - "73/73 [==============================] - 0s 1ms/step - loss: 1.2576\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.2574 - val_loss: 1.2454\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0425 - val_loss: 0.9822\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8597 - val_loss: 0.8456\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7471 - val_loss: 0.7584\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 0.6779 - val_loss: 0.7159\n", - "73/73 [==============================] - 0s 763us/step - loss: 0.6982\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.6063 - val_loss: 0.9563\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7833 - val_loss: 0.7308\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6681 - val_loss: 0.6596\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6114 - val_loss: 0.6140\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5704 - val_loss: 0.5707\n", - "73/73 [==============================] - 0s 826us/step - loss: 0.5110\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.3129 - val_loss: 0.7344\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6429 - val_loss: 0.6443\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5796 - val_loss: 0.5974\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5417 - val_loss: 0.5664\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5143 - val_loss: 0.5434\n", - "73/73 [==============================] - 0s 749us/step - loss: 0.5032\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.6273 - val_loss: 1.1466\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8452 - val_loss: 0.7011\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6058 - val_loss: 0.5820\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5097 - val_loss: 0.4980\n", - "Epoch 
5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4585 - val_loss: 0.4616\n", - "73/73 [==============================] - 0s 814us/step - loss: 0.4293\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.9481 - val_loss: 1.1985\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9156 - val_loss: 0.7770\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6906 - val_loss: 0.7080\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6333 - val_loss: 0.6558\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5943 - val_loss: 0.6224\n", - "73/73 [==============================] - 0s 777us/step - loss: 0.6007\n", - "Epoch 1/5\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "291/291 [==============================] - 0s 1ms/step - loss: 1.5283 - val_loss: 1.0011\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8318 - val_loss: 0.7538\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6750 - val_loss: 0.6752\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6013 - val_loss: 0.6110\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5507 - val_loss: 0.5656\n", - "73/73 [==============================] - 0s 833us/step - loss: 0.5598\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0966 - val_loss: 0.7408\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6507 - val_loss: 0.6474\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5789 - val_loss: 0.5892\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5334 - val_loss: 0.5373\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4910 - val_loss: 0.5109\n", - "73/73 [==============================] - 0s 808us/step - loss: 0.4498\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.3416 - val_loss: 0.6886\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5824 - val_loss: 0.5943\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4919 - val_loss: 0.5130\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4395 - val_loss: 0.4550\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4179 - val_loss: 0.4339\n", - "73/73 [==============================] - 0s 1ms/step - loss: 0.4090\n", - "Epoch 1/5\n", - "291/291 [==============================] - 2s 6ms/step - loss: 0.9098 - val_loss: 0.7025\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 0.5895 - val_loss: 0.5666\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4770 - val_loss: 0.4795\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4287 - val_loss: 0.5119\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4126 - val_loss: 0.4287\n", - "73/73 [==============================] - 0s 800us/step - loss: 0.4060\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8611 - 
val_loss: 0.6640\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5389 - val_loss: 4.0906\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 0.4844 - val_loss: 0.4656\n", - "Epoch 4/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 0.4273 - val_loss: 0.4678\n", - "Epoch 5/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 0.4154 - val_loss: 0.4393\n", - "73/73 [==============================] - 0s 1ms/step - loss: 0.4407\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 1.5652 - val_loss: 1.2856\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8777 - val_loss: 0.6851\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6127 - val_loss: 0.6236\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5360 - val_loss: 0.5387\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4947 - val_loss: 0.5119\n", - "73/73 [==============================] - 0s 883us/step - loss: 0.5119\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 6.2815 - val_loss: 5.8696\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.2052 - val_loss: 5.0919\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.5904 - val_loss: 4.5599\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.1326 - val_loss: 4.1366\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.7566 - val_loss: 3.7805\n", - "73/73 [==============================] - 0s 809us/step - loss: 3.4556\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.1626 - val_loss: 4.9619\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2879 - val_loss: 4.0761\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.4177 - val_loss: 3.1874\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.6382 - val_loss: 2.4648\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.0553 - val_loss: 1.9711\n", - "73/73 [==============================] - 0s 785us/step - loss: 1.8437\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.8162 - val_loss: 4.3934\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.6430 - val_loss: 3.3615\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.7633 - val_loss: 2.6008\n", - "Epoch 4/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 2.1290 - val_loss: 2.0789\n", - "Epoch 5/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 1.7092 - val_loss: 1.7428\n", - "73/73 [==============================] - 0s 855us/step - loss: 1.5776\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 5.0626 - val_loss: 4.6340\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.8428 - val_loss: 3.3945\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 2.8159 - val_loss: 2.5003\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 2.1188 - 
val_loss: 1.9421\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 1.7170 - val_loss: 1.6501\n", - "73/73 [==============================] - 0s 1ms/step - loss: 1.5723\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 5.6103 - val_loss: 5.4771\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.8104 - val_loss: 4.7596\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.1947 - val_loss: 4.1711\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.6711 - val_loss: 3.6550\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 3.2036 - val_loss: 3.1872\n", - "73/73 [==============================] - 0s 1ms/step - loss: 3.1116\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 6.0842 - val_loss: 5.8855\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.2507 - val_loss: 5.1372\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.6145 - val_loss: 4.5471\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 4.1047 - val_loss: 4.0676\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.6863 - val_loss: 3.6688\n", - "73/73 [==============================] - 0s 1ms/step - loss: 3.3478\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.2607 - val_loss: 5.0887\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.5092 - val_loss: 4.3717\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.8565 - val_loss: 3.7410\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.2836 - val_loss: 3.1843\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.7845 - val_loss: 2.7036\n", - "73/73 [==============================] - 0s 788us/step - loss: 2.6376\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.2546 - val_loss: 5.3253\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.9153 - val_loss: 4.9895\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.5999 - val_loss: 4.6773\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.3060 - val_loss: 4.3834\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.0288 - val_loss: 4.1050\n", - "73/73 [==============================] - 0s 910us/step - loss: 3.9560\n", - "Epoch 1/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 5.6712 - val_loss: 5.7048\n", - "Epoch 2/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 5.3128 - val_loss: 5.3599\n", - "Epoch 3/5\n", - "291/291 [==============================] - 1s 2ms/step - loss: 4.9973 - val_loss: 5.0518\n", - "Epoch 4/5\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "291/291 [==============================] - 1s 3ms/step - loss: 4.7127 - val_loss: 4.7711\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4507 - val_loss: 4.5109\n", - "73/73 [==============================] - 0s 767us/step - loss: 4.1631\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 
1ms/step - loss: 4.0911 - val_loss: 4.0414\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.6222 - val_loss: 3.5913\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 3.2316 - val_loss: 3.2131\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.9007 - val_loss: 2.8942\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.6237 - val_loss: 2.6286\n", - "73/73 [==============================] - 0s 902us/step - loss: 2.4640\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.9715 - val_loss: 0.9200\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7871 - val_loss: 0.7300\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6737 - val_loss: 0.6735\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6296 - val_loss: 0.6350\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 0.5985 - val_loss: 0.6077\n", - "73/73 [==============================] - 0s 807us/step - loss: 0.5657\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.7332 - val_loss: 1.5698\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.2712 - val_loss: 1.1817\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9797 - val_loss: 0.8973\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7523 - val_loss: 0.7174\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6471 - val_loss: 0.6661\n", - "73/73 [==============================] - 0s 816us/step - loss: 0.6276\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 2.6570 - val_loss: 1.7487\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.3777 - val_loss: 1.2991\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1124 - val_loss: 1.0747\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9391 - val_loss: 0.9125\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8143 - val_loss: 0.8078\n", - "73/73 [==============================] - 0s 785us/step - loss: 0.7731\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.7348 - val_loss: 1.4765\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1625 - val_loss: 1.1437\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9672 - val_loss: 0.9821\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8518 - val_loss: 0.8776\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7743 - val_loss: 0.8102\n", - "73/73 [==============================] - 0s 801us/step - loss: 0.7489\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.3529 - val_loss: 1.3884\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1067 - val_loss: 0.9939\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9236 - val_loss: 0.8744\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 
1ms/step - loss: 0.8257 - val_loss: 0.8059\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7538 - val_loss: 0.7608\n", - "73/73 [==============================] - 0s 764us/step - loss: 0.7438\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1032 - val_loss: 0.6575\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5835 - val_loss: 0.5613\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5118 - val_loss: 0.5108\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4767 - val_loss: 0.4801\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4571 - val_loss: 0.4639\n", - "73/73 [==============================] - 0s 815us/step - loss: 0.4166\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.3559 - val_loss: 0.8263\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7389 - val_loss: 0.7271\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6579 - val_loss: 0.6601\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5941 - val_loss: 0.6058\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5478 - val_loss: 0.5684\n", - "73/73 [==============================] - 0s 811us/step - loss: 0.5247\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.6783 - val_loss: 1.2288\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9129 - val_loss: 0.7413\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6655 - val_loss: 0.6729\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6131 - val_loss: 0.6273\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5714 - val_loss: 0.5821\n", - "73/73 [==============================] - 0s 803us/step - loss: 0.5535\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.2879 - val_loss: 0.8117\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6771 - val_loss: 0.6789\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5949 - val_loss: 0.6125\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5496 - val_loss: 0.5674\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5103 - val_loss: 0.5310\n", - "73/73 [==============================] - 0s 742us/step - loss: 0.5254\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8752 - val_loss: 0.5577\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4914 - val_loss: 0.5044\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4559 - val_loss: 0.4679\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4321 - val_loss: 0.4639\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4174 - val_loss: 0.4495\n", - "73/73 [==============================] - 0s 761us/step - loss: 0.4507\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9477 - val_loss: 
0.6377\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5449 - val_loss: 0.5340\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4696 - val_loss: 0.4722\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4608 - val_loss: 0.5270\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4435 - val_loss: 0.4458\n", - "73/73 [==============================] - 0s 809us/step - loss: 0.4035\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7668 - val_loss: 0.5660\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4922 - val_loss: 0.5190\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4928 - val_loss: 0.4735\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4284 - val_loss: 0.4385\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4066 - val_loss: 0.4424\n", - "73/73 [==============================] - 0s 784us/step - loss: 0.4197\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9284 - val_loss: 0.5801\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5115 - val_loss: 0.5188\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4733 - val_loss: 0.4776\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4373 - val_loss: 0.4539\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4194 - val_loss: 0.4276\n", - "73/73 [==============================] - 0s 730us/step - loss: 0.4150\n", - "Epoch 1/5\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "291/291 [==============================] - 0s 1ms/step - loss: 1.0626 - val_loss: 0.5508\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4793 - val_loss: 0.4605\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4268 - val_loss: 0.4409\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4026 - val_loss: 0.4066\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.3897 - val_loss: 0.4155\n", - "73/73 [==============================] - 0s 748us/step - loss: 0.4110\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.2784 - val_loss: 0.7612\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5864 - val_loss: 0.5595\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4915 - val_loss: 0.5030\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4468 - val_loss: 0.4685\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4165 - val_loss: 0.4279\n", - "73/73 [==============================] - 0s 782us/step - loss: 0.4290\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.9371 - val_loss: 4.6894\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.0600 - val_loss: 3.7815\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.2163 - val_loss: 2.9661\n", - "Epoch 4/5\n", - "291/291 
[==============================] - 0s 1ms/step - loss: 2.4913 - val_loss: 2.2945\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.9238 - val_loss: 1.8019\n", - "73/73 [==============================] - 0s 744us/step - loss: 1.6316\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.8375 - val_loss: 5.6362\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.0092 - val_loss: 4.9306\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.3940 - val_loss: 4.3509\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.8743 - val_loss: 3.8614\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.4377 - val_loss: 3.4476\n", - "73/73 [==============================] - 0s 862us/step - loss: 3.3255\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.9759 - val_loss: 4.7916\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.1281 - val_loss: 3.9506\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.3538 - val_loss: 3.1669\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.6259 - val_loss: 2.4511\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.0386 - val_loss: 1.9271\n", - "73/73 [==============================] - 0s 903us/step - loss: 1.7830\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.4325 - val_loss: 3.8219\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.1094 - val_loss: 2.7308\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.2000 - val_loss: 2.0279\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.6315 - val_loss: 1.6520\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.3275 - val_loss: 1.4663\n", - "73/73 [==============================] - 0s 793us/step - loss: 1.6378\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.0869 - val_loss: 3.7319\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.1193 - val_loss: 2.8999\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.4533 - val_loss: 2.3178\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.0165 - val_loss: 1.9623\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.7559 - val_loss: 1.7491\n", - "73/73 [==============================] - 0s 806us/step - loss: 1.6231\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 2ms/step - loss: 5.1664 - val_loss: 5.1030\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.6720 - val_loss: 4.6178\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2207 - val_loss: 4.1746\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.8074 - val_loss: 3.7682\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.4290 - val_loss: 3.3985\n", - "73/73 [==============================] - 0s 795us/step - loss: 3.1035\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 
1ms/step - loss: 6.0166 - val_loss: 5.5802\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.7776 - val_loss: 4.5192\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.9017 - val_loss: 3.7250\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.2220 - val_loss: 3.0839\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.6756 - val_loss: 2.5711\n", - "73/73 [==============================] - 0s 768us/step - loss: 2.4867\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.4434 - val_loss: 5.2485\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.5123 - val_loss: 4.3123\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.6942 - val_loss: 3.5131\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.0165 - val_loss: 2.8602\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.4742 - val_loss: 2.3453\n", - "73/73 [==============================] - 0s 760us/step - loss: 2.2334\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.3227 - val_loss: 5.1978\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.7438 - val_loss: 4.6406\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2253 - val_loss: 4.1376\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.7565 - val_loss: 3.6787\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 3.3281 - val_loss: 3.2587\n", - "73/73 [==============================] - 0s 757us/step - loss: 3.0075\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.6666 - val_loss: 5.7201\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 5.2248 - val_loss: 5.3056\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.8551 - val_loss: 4.9493\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.5315 - val_loss: 4.6310\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 4.2401 - val_loss: 4.3405\n", - "73/73 [==============================] - 0s 793us/step - loss: 4.2490\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.7997 - val_loss: 1.6501\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.3973 - val_loss: 1.3409\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1599 - val_loss: 1.0921\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.9079 - val_loss: 0.8049\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6927 - val_loss: 0.6544\n", - "73/73 [==============================] - 0s 803us/step - loss: 0.6152\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.6487 - val_loss: 1.3596\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0703 - val_loss: 0.9311\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7962 - val_loss: 0.7387\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 
1ms/step - loss: 0.6686 - val_loss: 0.6662\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6169 - val_loss: 0.6439\n", - "73/73 [==============================] - 0s 735us/step - loss: 0.5928\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.2719 - val_loss: 1.2279\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0016 - val_loss: 0.8730\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7864 - val_loss: 0.7710\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7165 - val_loss: 0.7285\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6785 - val_loss: 0.6995\n", - "73/73 [==============================] - 0s 733us/step - loss: 0.6663\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.2156 - val_loss: 1.4413\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0867 - val_loss: 0.8911\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7237 - val_loss: 0.6792\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6057 - val_loss: 0.6232\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5600 - val_loss: 0.5879\n", - "73/73 [==============================] - 0s 749us/step - loss: 0.5722\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 2.6005 - val_loss: 1.2742\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.0069 - val_loss: 0.9386\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.8379 - val_loss: 0.8460\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7713 - val_loss: 0.7948\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.7299 - val_loss: 0.7609\n", - "73/73 [==============================] - 0s 799us/step - loss: 0.7603\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.1858 - val_loss: 0.7471\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.6326 - val_loss: 0.6372\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5705 - val_loss: 0.5735\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5218 - val_loss: 0.5324\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4867 - val_loss: 0.4955\n", - "73/73 [==============================] - 0s 810us/step - loss: 0.4433\n", - "Epoch 1/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 1.2401 - val_loss: 0.6763\n", - "Epoch 2/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5773 - val_loss: 0.5786\n", - "Epoch 3/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.5061 - val_loss: 0.5134\n", - "Epoch 4/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4800 - val_loss: 0.4791\n", - "Epoch 5/5\n", - "291/291 [==============================] - 0s 1ms/step - loss: 0.4337 - val_loss: 0.4505\n", - "73/73 [==============================] - 0s 829us/step - loss: 0.4331\n", - "Epoch 1/5\n", - "291/291 
-      [condensed in this excerpt: the deleted output was a long run of repeated Keras training logs, one block per cross-validation fit of each GridSearchCV candidate; each block contains five "Epoch n/5" lines of the form "291/291 [==============================] - 0s 1ms/step - loss: ... - val_loss: ..." followed by one "73/73 [==============================] - ... - loss: ..." evaluation line, with per-fit losses ranging between roughly 0.36 and 3.8 in this stretch]
"Epoch 1/5\n", - "363/363 [==============================] - 0s 1ms/step - loss: 0.6946 - val_loss: 0.5043\n", - "Epoch 2/5\n", - "363/363 [==============================] - 0s 1ms/step - loss: 0.4602 - val_loss: 0.4307\n", - "Epoch 3/5\n", - "363/363 [==============================] - 0s 1ms/step - loss: 0.4124 - val_loss: 0.4057\n", - "Epoch 4/5\n", - "363/363 [==============================] - 0s 1ms/step - loss: 0.3900 - val_loss: 0.3942\n", - "Epoch 5/5\n", - "363/363 [==============================] - 0s 1ms/step - loss: 0.3799 - val_loss: 0.3794\n" - ] - }, - { - "data": { - "text/plain": [ - "GridSearchCV(estimator=,\n", - " param_grid={'hidden_layers': [1, 2, 3, 4],\n", - " 'layer_size': [5, 10, 20, 30],\n", - " 'learning_rate': [0.0001, 5e-05, 0.001, 0.005, 0.01]})" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "#scipy也是sk中的\n", - "from scipy.stats import reciprocal\n", - "# 分布函数\n", - "# f(x) = 1/(x*log(b/a)) a <= x <= b\n", - "\n", - "#sk 0.21.3版本可以用这种列表\n", - "# param_distribution = {\n", - "# \"hidden_layers\":[1, 2, 3, 4],\n", - "# \"layer_size\": np.arange(1, 100),\n", - "# \"learning_rate\": reciprocal(1e-4, 1e-2),\n", - "# }\n", - "#最新版本只能用普通列表\n", - "param_distribution = {\n", - " \"hidden_layers\": [1, 2, 3, 4],\n", - " \"layer_size\": [5, 10, 20, 30],\n", - " \"learning_rate\": [1e-4, 5e-5, 1e-3, 5e-3, 1e-2],\n", - "}\n", - "\n", - "from sklearn.model_selection import RandomizedSearchCV,GridSearchCV\n", - "\n", - "#随机搜索\n", - "# random_search_cv = RandomizedSearchCV(sklearn_model,\n", - "# param_distribution)\n", - "grid_search_cv =GridSearchCV(sklearn_model,param_distribution)\n", - "# random_search_cv.fit(x_train_scaled, y_train, epochs = 5,\n", - "# validation_data = (x_valid_scaled, y_valid),\n", - "# callbacks = callbacks)\n", - "\n", - "grid_search_cv.fit(x_train_scaled, y_train, epochs = 5,\n", - " validation_data = (x_valid_scaled, y_valid),\n", - " callbacks = callbacks)\n", - "# cross_validation: 训练集分成n份,n-1训练,最后一份验证." 
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "{'hidden_layers': 3, 'layer_size': 30, 'learning_rate': 0.01}\n",
-      "-0.3978272318840027\n",
-      "\n"
-     ]
-    }
-   ],
-   "source": [
-    "# print(random_search_cv.best_params_)\n",
-    "# print(random_search_cv.best_score_)\n",
-    "# print(random_search_cv.best_estimator_)\n",
-    "\n",
-    "print(grid_search_cv.best_params_)\n",
-    "print(grid_search_cv.best_score_)\n",
-    "print(grid_search_cv.best_estimator_)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "162/162 [==============================] - 0s 770us/step - loss: 0.3876\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "0.38763508200645447"
-      ]
-     },
-     "execution_count": 9,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# take the best model found by the search\n",
-    "# model = random_search_cv.best_estimator_.model\n",
-    "\n",
-    "# model.evaluate(x_test_scaled, y_test)\n",
-    "\n",
-    "model = grid_search_cv.best_estimator_.model\n",
-    "model.evaluate(x_test_scaled, y_test)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.6.9"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
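Note on the deleted hyper-parameter-search cells above: `sklearn_model` and `callbacks` are defined in an earlier cell of the same notebook that this hunk does not show. The sketch below is a minimal, assumed reconstruction of that setup: a `KerasRegressor` wrapping a `build_model` function whose keyword arguments match the search-space keys, plus the commented-out `RandomizedSearchCV` variant that samples `learning_rate` from `reciprocal(1e-4, 1e-2)`. The function name `build_model`, the layer sizes, and the early-stopping settings are illustrative assumptions, not taken from the notebook.

    # Sketch only: assumes x_train_scaled, y_train, x_valid_scaled, y_valid already exist
    # (the scaled training/validation splits prepared earlier in the notebook).
    import numpy as np
    from scipy.stats import reciprocal
    from sklearn.model_selection import RandomizedSearchCV
    from tensorflow import keras

    def build_model(hidden_layers=1, layer_size=30, learning_rate=3e-3):
        # Small MLP regressor; keyword names must match the search-space keys.
        model = keras.models.Sequential()
        model.add(keras.layers.Dense(layer_size, activation="relu",
                                     input_shape=x_train_scaled.shape[1:]))
        for _ in range(hidden_layers - 1):
            model.add(keras.layers.Dense(layer_size, activation="relu"))
        model.add(keras.layers.Dense(1))
        model.compile(loss="mse", optimizer=keras.optimizers.SGD(learning_rate))
        return model

    # Wrap the Keras model so scikit-learn's search utilities can clone/fit/score it.
    sklearn_model = keras.wrappers.scikit_learn.KerasRegressor(build_fn=build_model)
    callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-3)]

    # Unlike GridSearchCV, RandomizedSearchCV can sample learning_rate from a
    # continuous reciprocal distribution, as the commented-out code intended.
    param_distribution = {
        "hidden_layers": [1, 2, 3, 4],
        "layer_size": np.arange(1, 100),
        "learning_rate": reciprocal(1e-4, 1e-2),
    }
    random_search_cv = RandomizedSearchCV(sklearn_model, param_distribution,
                                          n_iter=10, cv=3)
    random_search_cv.fit(x_train_scaled, y_train, epochs=5,
                         validation_data=(x_valid_scaled, y_valid),
                         callbacks=callbacks)
    print(random_search_cv.best_params_)

Both searches refit the best candidate on the full training set once the cross-validation loop finishes, which is what the final "363/363" training log in the deleted output corresponds to.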