- {
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "2.2.0\n",
- "sys.version_info(major=3, minor=6, micro=9, releaselevel='final', serial=0)\n",
- "matplotlib 3.3.4\n",
- "numpy 1.19.5\n",
- "pandas 1.1.5\n",
- "sklearn 0.24.2\n",
- "tensorflow 2.2.0\n",
- "tensorflow.keras 2.3.0-tf\n"
- ]
- }
- ],
- "source": [
- "import matplotlib as mpl\n",
- "import matplotlib.pyplot as plt\n",
- "%matplotlib inline\n",
- "import numpy as np\n",
- "import sklearn\n",
- "import pandas as pd\n",
- "import os\n",
- "import sys\n",
- "import time\n",
- "import tensorflow as tf\n",
- "\n",
- "from tensorflow import keras\n",
- "\n",
- "print(tf.__version__)\n",
- "print(sys.version_info)\n",
- "for module in mpl, np, pd, sklearn, tf, keras:\n",
- " print(module.__name__, module.__version__)\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "(5000, 28, 28) (5000,)\n",
- "(55000, 28, 28) (55000,)\n",
- "(10000, 28, 28) (10000,)\n"
- ]
- }
- ],
- "source": [
- "fashion_mnist = keras.datasets.fashion_mnist\n",
- "(x_train_all, y_train_all), (x_test, y_test) = fashion_mnist.load_data()\n",
- "x_valid, x_train = x_train_all[:5000], x_train_all[5000:]\n",
- "y_valid, y_train = y_train_all[:5000], y_train_all[5000:]\n",
- "\n",
- "print(x_valid.shape, y_valid.shape)\n",
- "print(x_train.shape, y_train.shape)\n",
- "print(x_test.shape, y_test.shape)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 3,
- "metadata": {},
- "outputs": [],
- "source": [
- "# x = (x - u) / std\n",
- "\n",
- "from sklearn.preprocessing import StandardScaler\n",
- "\n",
- "scaler = StandardScaler()\n",
- "# x_train: [None, 28, 28] -> [None, 784]\n",
- "x_train_scaled = scaler.fit_transform(\n",
- " x_train.astype(np.float32).reshape(-1, 1)).reshape(-1, 28, 28)\n",
- "x_valid_scaled = scaler.transform(\n",
- " x_valid.astype(np.float32).reshape(-1, 1)).reshape(-1, 28, 28)\n",
- "x_test_scaled = scaler.transform(\n",
- " x_test.astype(np.float32).reshape(-1, 1)).reshape(-1, 28, 28)\n"
- ]
- },
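-         {
-             "cell_type": "markdown",
-             "metadata": {},
-             "source": [
-                 "Note on the scaling step above: `StandardScaler` expects 2-D input, so the images are reshaped to a single column (`reshape(-1, 1)`), standardized with the mean and standard deviation computed on the training set only, and then reshaped back to `[N, 28, 28]`. A minimal sketch of the same idea outside the notebook (the variable names here are illustrative, not part of the original code):\n",
-                 "\n",
-                 "```python\n",
-                 "# Sketch: standardize pixel values using training-set statistics only.\n",
-                 "import numpy as np\n",
-                 "from sklearn.preprocessing import StandardScaler\n",
-                 "\n",
-                 "x_train = np.random.randint(0, 256, size=(100, 28, 28))  # stand-in data\n",
-                 "x_test = np.random.randint(0, 256, size=(20, 28, 28))\n",
-                 "\n",
-                 "scaler = StandardScaler()\n",
-                 "# fit_transform on train, transform (no refit) on test to avoid leakage\n",
-                 "x_train_scaled = scaler.fit_transform(\n",
-                 "    x_train.astype(np.float32).reshape(-1, 1)).reshape(-1, 28, 28)\n",
-                 "x_test_scaled = scaler.transform(\n",
-                 "    x_test.astype(np.float32).reshape(-1, 1)).reshape(-1, 28, 28)\n",
-                 "```"
-             ]
-         },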
- {
- "cell_type": "code",
- "execution_count": 4,
- "metadata": {},
- "outputs": [],
- "source": [
- "# tf.keras.models.Sequential()\n",
- "\n",
- "model = keras.models.Sequential()\n",
- "model.add(keras.layers.Flatten(input_shape=[28, 28]))\n",
- "for _ in range(20):\n",
- " model.add(keras.layers.Dense(100, activation=\"selu\"))#把之前的relu改为了selu\n",
- "model.add(keras.layers.Dense(10, activation=\"softmax\"))\n",
- "\n",
- "model.compile(loss=\"sparse_categorical_crossentropy\",\n",
- " optimizer = keras.optimizers.SGD(0.001),\n",
- " metrics = [\"accuracy\"])"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 5,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Help on class Dense in module tensorflow.python.keras.layers.core:\n",
- "\n",
- "class Dense(tensorflow.python.keras.engine.base_layer.Layer)\n",
- " | Just your regular densely-connected NN layer.\n",
- " | \n",
- " | `Dense` implements the operation:\n",
- " | `output = activation(dot(input, kernel) + bias)`\n",
- " | where `activation` is the element-wise activation function\n",
- " | passed as the `activation` argument, `kernel` is a weights matrix\n",
- " | created by the layer, and `bias` is a bias vector created by the layer\n",
- " | (only applicable if `use_bias` is `True`).\n",
- " | \n",
- " | Note: If the input to the layer has a rank greater than 2, then `Dense`\n",
- " | computes the dot product between the `inputs` and the `kernel` along the\n",
- " | last axis of the `inputs` and axis 1 of the `kernel` (using `tf.tensordot`).\n",
- " | For example, if input has dimensions `(batch_size, d0, d1)`,\n",
- " | then we create a `kernel` with shape `(d1, units)`, and the `kernel` operates\n",
- " | along axis 2 of the `input`, on every sub-tensor of shape `(1, 1, d1)`\n",
- " | (there are `batch_size * d0` such sub-tensors).\n",
- " | The output in this case will have shape `(batch_size, d0, units)`.\n",
- " | \n",
- " | Besides, layer attributes cannot be modified after the layer has been called\n",
- " | once (except the `trainable` attribute).\n",
- " | \n",
- " | Example:\n",
- " | \n",
- " | ```python\n",
- " | # as first layer in a sequential model:\n",
- " | model = Sequential()\n",
- " | model.add(Dense(32, input_shape=(16,)))\n",
- " | # now the model will take as input arrays of shape (*, 16)\n",
- " | # and output arrays of shape (*, 32)\n",
- " | \n",
- " | # after the first layer, you don't need to specify\n",
- " | # the size of the input anymore:\n",
- " | model.add(Dense(32))\n",
- " | ```\n",
- " | \n",
- " | Arguments:\n",
- " | units: Positive integer, dimensionality of the output space.\n",
- " | activation: Activation function to use.\n",
- " | If you don't specify anything, no activation is applied\n",
- " | (ie. \"linear\" activation: `a(x) = x`).\n",
- " | use_bias: Boolean, whether the layer uses a bias vector.\n",
- " | kernel_initializer: Initializer for the `kernel` weights matrix.\n",
- " | bias_initializer: Initializer for the bias vector.\n",
- " | kernel_regularizer: Regularizer function applied to\n",
- " | the `kernel` weights matrix.\n",
- " | bias_regularizer: Regularizer function applied to the bias vector.\n",
- " | activity_regularizer: Regularizer function applied to\n",
- " | the output of the layer (its \"activation\")..\n",
- " | kernel_constraint: Constraint function applied to\n",
- " | the `kernel` weights matrix.\n",
- " | bias_constraint: Constraint function applied to the bias vector.\n",
- " | \n",
- " | Input shape:\n",
- " | N-D tensor with shape: `(batch_size, ..., input_dim)`.\n",
- " | The most common situation would be\n",
- " | a 2D input with shape `(batch_size, input_dim)`.\n",
- " | \n",
- " | Output shape:\n",
- " | N-D tensor with shape: `(batch_size, ..., units)`.\n",
- " | For instance, for a 2D input with shape `(batch_size, input_dim)`,\n",
- " | the output would have shape `(batch_size, units)`.\n",
- " | \n",
- " | Method resolution order:\n",
- " | Dense\n",
- " | tensorflow.python.keras.engine.base_layer.Layer\n",
- " | tensorflow.python.module.module.Module\n",
- " | tensorflow.python.training.tracking.tracking.AutoTrackable\n",
- " | tensorflow.python.training.tracking.base.Trackable\n",
- " | tensorflow.python.keras.utils.version_utils.LayerVersionSelector\n",
- " | builtins.object\n",
- " | \n",
- " | Methods defined here:\n",
- " | \n",
- " | __init__(self, units, activation=None, use_bias=True, kernel_initializer='glorot_uniform', bias_initializer='zeros', kernel_regularizer=None, bias_regularizer=None, activity_regularizer=None, kernel_constraint=None, bias_constraint=None, **kwargs)\n",
- " | \n",
- " | build(self, input_shape)\n",
- " | Creates the variables of the layer (optional, for subclass implementers).\n",
- " | \n",
- " | This is a method that implementers of subclasses of `Layer` or `Model`\n",
- " | can override if they need a state-creation step in-between\n",
- " | layer instantiation and layer call.\n",
- " | \n",
- " | This is typically used to create the weights of `Layer` subclasses.\n",
- " | \n",
- " | Arguments:\n",
- " | input_shape: Instance of `TensorShape`, or list of instances of\n",
- " | `TensorShape` if the layer expects a list of inputs\n",
- " | (one instance per input).\n",
- " | \n",
- " | call(self, inputs)\n",
- " | This is where the layer's logic lives.\n",
- " | \n",
- " | Arguments:\n",
- " | inputs: Input tensor, or list/tuple of input tensors.\n",
- " | **kwargs: Additional keyword arguments.\n",
- " | \n",
- " | Returns:\n",
- " | A tensor or list/tuple of tensors.\n",
- " | \n",
- " | compute_output_shape(self, input_shape)\n",
- " | Computes the output shape of the layer.\n",
- " | \n",
- " | If the layer has not been built, this method will call `build` on the\n",
- " | layer. This assumes that the layer will later be used with inputs that\n",
- " | match the input shape provided here.\n",
- " | \n",
- " | Arguments:\n",
- " | input_shape: Shape tuple (tuple of integers)\n",
- " | or list of shape tuples (one per output tensor of the layer).\n",
- " | Shape tuples can include None for free dimensions,\n",
- " | instead of an integer.\n",
- " | \n",
- " | Returns:\n",
- " | An input shape tuple.\n",
- " | \n",
- " | get_config(self)\n",
- " | Returns the config of the layer.\n",
- " | \n",
- " | A layer config is a Python dictionary (serializable)\n",
- " | containing the configuration of a layer.\n",
- " | The same layer can be reinstantiated later\n",
- " | (without its trained weights) from this configuration.\n",
- " | \n",
- " | The config of a layer does not include connectivity\n",
- " | information, nor the layer class name. These are handled\n",
- " | by `Network` (one layer of abstraction above).\n",
- " | \n",
- " | Returns:\n",
- " | Python dictionary.\n",
- " | \n",
- " | ----------------------------------------------------------------------\n",
- " | Methods inherited from tensorflow.python.keras.engine.base_layer.Layer:\n",
- " | \n",
- " | __call__(self, *args, **kwargs)\n",
- " | Wraps `call`, applying pre- and post-processing steps.\n",
- " | \n",
- " | Arguments:\n",
- " | *args: Positional arguments to be passed to `self.call`.\n",
- " | **kwargs: Keyword arguments to be passed to `self.call`.\n",
- " | \n",
- " | Returns:\n",
- " | Output tensor(s).\n",
- " | \n",
- " | Note:\n",
- " | - The following optional keyword arguments are reserved for specific uses:\n",
- " | * `training`: Boolean scalar tensor of Python boolean indicating\n",
- " | whether the `call` is meant for training or inference.\n",
- " | * `mask`: Boolean input mask.\n",
- " | - If the layer's `call` method takes a `mask` argument (as some Keras\n",
- " | layers do), its default value will be set to the mask generated\n",
- " | for `inputs` by the previous layer (if `input` did come from\n",
- " | a layer that generated a corresponding mask, i.e. if it came from\n",
- " | a Keras layer with masking support.\n",
- " | \n",
- " | Raises:\n",
- " | ValueError: if the layer's `call` method returns None (an invalid value).\n",
- " | RuntimeError: if `super().__init__()` was not called in the constructor.\n",
- " | \n",
- " | __delattr__(self, name)\n",
- " | Implement delattr(self, name).\n",
- " | \n",
- " | __getstate__(self)\n",
- " | \n",
- " | __setattr__(self, name, value)\n",
- " | Support self.foo = trackable syntax.\n",
- " | \n",
- " | __setstate__(self, state)\n",
- " | \n",
- " | add_loss(self, losses, inputs=None)\n",
- " | Add loss tensor(s), potentially dependent on layer inputs.\n",
- " | \n",
- " | Some losses (for instance, activity regularization losses) may be dependent\n",
- " | on the inputs passed when calling a layer. Hence, when reusing the same\n",
- " | layer on different inputs `a` and `b`, some entries in `layer.losses` may\n",
- " | be dependent on `a` and some on `b`. This method automatically keeps track\n",
- " | of dependencies.\n",
- " | \n",
- " | This method can be used inside a subclassed layer or model's `call`\n",
- " | function, in which case `losses` should be a Tensor or list of Tensors.\n",
- " | \n",
- " | Example:\n",
- " | \n",
- " | ```python\n",
- " | class MyLayer(tf.keras.layers.Layer):\n",
- " | def call(inputs, self):\n",
- " | self.add_loss(tf.abs(tf.reduce_mean(inputs)), inputs=True)\n",
- " | return inputs\n",
- " | ```\n",
- " | \n",
- " | This method can also be called directly on a Functional Model during\n",
- " | construction. In this case, any loss Tensors passed to this Model must\n",
- " | be symbolic and be able to be traced back to the model's `Input`s. These\n",
- " | losses become part of the model's topology and are tracked in `get_config`.\n",
- " | \n",
- " | Example:\n",
- " | \n",
- " | ```python\n",
- " | inputs = tf.keras.Input(shape=(10,))\n",
- " | x = tf.keras.layers.Dense(10)(inputs)\n",
- " | outputs = tf.keras.layers.Dense(1)(x)\n",
- " | model = tf.keras.Model(inputs, outputs)\n",
- " | # Activity regularization.\n",
- " | model.add_loss(tf.abs(tf.reduce_mean(x)))\n",
- " | ```\n",
- " | \n",
- " | If this is not the case for your loss (if, for example, your loss references\n",
- " | a `Variable` of one of the model's layers), you can wrap your loss in a\n",
- " | zero-argument lambda. These losses are not tracked as part of the model's\n",
- " | topology since they can't be serialized.\n",
- " | \n",
- " | Example:\n",
- " | \n",
- " | ```python\n",
- " | inputs = tf.keras.Input(shape=(10,))\n",
- " | x = tf.keras.layers.Dense(10)(inputs)\n",
- " | outputs = tf.keras.layers.Dense(1)(x)\n",
- " | model = tf.keras.Model(inputs, outputs)\n",
- " | # Weight regularization.\n",
- " | model.add_loss(lambda: tf.reduce_mean(x.kernel))\n",
- " | ```\n",
- " | \n",
- " | The `get_losses_for` method allows to retrieve the losses relevant to a\n",
- " | specific set of inputs.\n",
- " | \n",
- " | Arguments:\n",
- " | losses: Loss tensor, or list/tuple of tensors. Rather than tensors, losses\n",
- " | may also be zero-argument callables which create a loss tensor.\n",
- " | inputs: Ignored when executing eagerly. If anything other than None is\n",
- " | passed, it signals the losses are conditional on some of the layer's\n",
- " | inputs, and thus they should only be run where these inputs are\n",
- " | available. This is the case for activity regularization losses, for\n",
- " | instance. If `None` is passed, the losses are assumed\n",
- " | to be unconditional, and will apply across all dataflows of the layer\n",
- " | (e.g. weight regularization losses).\n",
- " | \n",
- " | add_metric(self, value, aggregation=None, name=None)\n",
- " | Adds metric tensor to the layer.\n",
- " | \n",
- " | Args:\n",
- " | value: Metric tensor.\n",
- " | aggregation: Sample-wise metric reduction function. If `aggregation=None`,\n",
- " | it indicates that the metric tensor provided has been aggregated\n",
- " | already. eg, `bin_acc = BinaryAccuracy(name='acc')` followed by\n",
- " | `model.add_metric(bin_acc(y_true, y_pred))`. If aggregation='mean', the\n",
- " | given metric tensor will be sample-wise reduced using `mean` function.\n",
- " | eg, `model.add_metric(tf.reduce_sum(outputs), name='output_mean',\n",
- " | aggregation='mean')`.\n",
- " | name: String metric name.\n",
- " | \n",
- " | Raises:\n",
- " | ValueError: If `aggregation` is anything other than None or `mean`.\n",
- " | \n",
- " | add_update(self, updates, inputs=None)\n",
- " | Add update op(s), potentially dependent on layer inputs. (deprecated arguments)\n",
- " | \n",
- " | Warning: SOME ARGUMENTS ARE DEPRECATED: `(inputs)`. They will be removed in a future version.\n",
- " | Instructions for updating:\n",
- " | `inputs` is now automatically inferred\n",
- " | \n",
- " | Weight updates (for instance, the updates of the moving mean and variance\n",
- " | in a BatchNormalization layer) may be dependent on the inputs passed\n",
- " | when calling a layer. Hence, when reusing the same layer on\n",
- " | different inputs `a` and `b`, some entries in `layer.updates` may be\n",
- " | dependent on `a` and some on `b`. This method automatically keeps track\n",
- " | of dependencies.\n",
- " | \n",
- " | The `get_updates_for` method allows to retrieve the updates relevant to a\n",
- " | specific set of inputs.\n",
- " | \n",
- " | This call is ignored when eager execution is enabled (in that case, variable\n",
- " | updates are run on the fly and thus do not need to be tracked for later\n",
- " | execution).\n",
- " | \n",
- " | Arguments:\n",
- " | updates: Update op, or list/tuple of update ops, or zero-arg callable\n",
- " | that returns an update op. A zero-arg callable should be passed in\n",
- " | order to disable running the updates by setting `trainable=False`\n",
- " | on this Layer, when executing in Eager mode.\n",
- " | inputs: Deprecated, will be automatically inferred.\n",
- " | \n",
- " | add_variable(self, *args, **kwargs)\n",
- " | Deprecated, do NOT use! Alias for `add_weight`. (deprecated)\n",
- " | \n",
- " | Warning: THIS FUNCTION IS DEPRECATED. It will be removed in a future version.\n",
- " | Instructions for updating:\n",
- " | Please use `layer.add_weight` method instead.\n",
- " | \n",
- " | add_weight(self, name=None, shape=None, dtype=None, initializer=None, regularizer=None, trainable=None, constraint=None, partitioner=None, use_resource=None, synchronization=<VariableSynchronization.AUTO: 0>, aggregation=<VariableAggregation.NONE: 0>, **kwargs)\n",
- " | Adds a new variable to the layer.\n",
- " | \n",
- " | Arguments:\n",
- " | name: Variable name.\n",
- " | shape: Variable shape. Defaults to scalar if unspecified.\n",
- " | dtype: The type of the variable. Defaults to `self.dtype` or `float32`.\n",
- " | initializer: Initializer instance (callable).\n",
- " | regularizer: Regularizer instance (callable).\n",
- " | trainable: Boolean, whether the variable should be part of the layer's\n",
- " | \"trainable_variables\" (e.g. variables, biases)\n",
- " | or \"non_trainable_variables\" (e.g. BatchNorm mean and variance).\n",
- " | Note that `trainable` cannot be `True` if `synchronization`\n",
- " | is set to `ON_READ`.\n",
- " | constraint: Constraint instance (callable).\n",
- " | partitioner: Partitioner to be passed to the `Trackable` API.\n",
- " | use_resource: Whether to use `ResourceVariable`.\n",
- " | synchronization: Indicates when a distributed a variable will be\n",
- " | aggregated. Accepted values are constants defined in the class\n",
- " | `tf.VariableSynchronization`. By default the synchronization is set to\n",
- " | `AUTO` and the current `DistributionStrategy` chooses\n",
- " | when to synchronize. If `synchronization` is set to `ON_READ`,\n",
- " | `trainable` must not be set to `True`.\n",
- " | aggregation: Indicates how a distributed variable will be aggregated.\n",
- " | Accepted values are constants defined in the class\n",
- " | `tf.VariableAggregation`.\n",
- " | **kwargs: Additional keyword arguments. Accepted values are `getter`,\n",
- " | `collections`, `experimental_autocast` and `caching_device`.\n",
- " | \n",
- " | Returns:\n",
- " | The created variable. Usually either a `Variable` or `ResourceVariable`\n",
- " | instance. If `partitioner` is not `None`, a `PartitionedVariable`\n",
- " | instance is returned.\n",
- " | \n",
- " | Raises:\n",
- " | RuntimeError: If called with partitioned variable regularization and\n",
- " | eager execution is enabled.\n",
- " | ValueError: When giving unsupported dtype and no initializer or when\n",
- " | trainable has been set to True with synchronization set as `ON_READ`.\n",
- " | \n",
- " | apply(self, inputs, *args, **kwargs)\n",
- " | Deprecated, do NOT use! (deprecated)\n",
- " | \n",
- " | Warning: THIS FUNCTION IS DEPRECATED. It will be removed in a future version.\n",
- " | Instructions for updating:\n",
- " | Please use `layer.__call__` method instead.\n",
- " | \n",
- " | This is an alias of `self.__call__`.\n",
- " | \n",
- " | Arguments:\n",
- " | inputs: Input tensor(s).\n",
- " | *args: additional positional arguments to be passed to `self.call`.\n",
- " | **kwargs: additional keyword arguments to be passed to `self.call`.\n",
- " | \n",
- " | Returns:\n",
- " | Output tensor(s).\n",
- " | \n",
- " | compute_mask(self, inputs, mask=None)\n",
- " | Computes an output mask tensor.\n",
- " | \n",
- " | Arguments:\n",
- " | inputs: Tensor or list of tensors.\n",
- " | mask: Tensor or list of tensors.\n",
- " | \n",
- " | Returns:\n",
- " | None or a tensor (or list of tensors,\n",
- " | one per output tensor of the layer).\n",
- " | \n",
- " | compute_output_signature(self, input_signature)\n",
- " | Compute the output tensor signature of the layer based on the inputs.\n",
- " | \n",
- " | Unlike a TensorShape object, a TensorSpec object contains both shape\n",
- " | and dtype information for a tensor. This method allows layers to provide\n",
- " | output dtype information if it is different from the input dtype.\n",
- " | For any layer that doesn't implement this function,\n",
- " | the framework will fall back to use `compute_output_shape`, and will\n",
- " | assume that the output dtype matches the input dtype.\n",
- " | \n",
- " | Args:\n",
- " | input_signature: Single TensorSpec or nested structure of TensorSpec\n",
- " | objects, describing a candidate input for the layer.\n",
- " | \n",
- " | Returns:\n",
- " | Single TensorSpec or nested structure of TensorSpec objects, describing\n",
- " | how the layer would transform the provided input.\n",
- " | \n",
- " | Raises:\n",
- " | TypeError: If input_signature contains a non-TensorSpec object.\n",
- " | \n",
- " | count_params(self)\n",
- " | Count the total number of scalars composing the weights.\n",
- " | \n",
- " | Returns:\n",
- " | An integer count.\n",
- " | \n",
- " | Raises:\n",
- " | ValueError: if the layer isn't yet built\n",
- " | (in which case its weights aren't yet defined).\n",
- " | \n",
- " | get_input_at(self, node_index)\n",
- " | Retrieves the input tensor(s) of a layer at a given node.\n",
- " | \n",
- " | Arguments:\n",
- " | node_index: Integer, index of the node\n",
- " | from which to retrieve the attribute.\n",
- " | E.g. `node_index=0` will correspond to the\n",
- " | first time the layer was called.\n",
- " | \n",
- " | Returns:\n",
- " | A tensor (or list of tensors if the layer has multiple inputs).\n",
- " | \n",
- " | Raises:\n",
- " | RuntimeError: If called in Eager mode.\n",
- " | \n",
- " | get_input_mask_at(self, node_index)\n",
- " | Retrieves the input mask tensor(s) of a layer at a given node.\n",
- " | \n",
- " | Arguments:\n",
- " | node_index: Integer, index of the node\n",
- " | from which to retrieve the attribute.\n",
- " | E.g. `node_index=0` will correspond to the\n",
- " | first time the layer was called.\n",
- " | \n",
- " | Returns:\n",
- " | A mask tensor\n",
- " | (or list of tensors if the layer has multiple inputs).\n",
- " | \n",
- " | get_input_shape_at(self, node_index)\n",
- " | Retrieves the input shape(s) of a layer at a given node.\n",
- " | \n",
- " | Arguments:\n",
- " | node_index: Integer, index of the node\n",
- " | from which to retrieve the attribute.\n",
- " | E.g. `node_index=0` will correspond to the\n",
- " | first time the layer was called.\n",
- " | \n",
- " | Returns:\n",
- " | A shape tuple\n",
- " | (or list of shape tuples if the layer has multiple inputs).\n",
- " | \n",
- " | Raises:\n",
- " | RuntimeError: If called in Eager mode.\n",
- " | \n",
- " | get_losses_for(self, inputs)\n",
- " | Retrieves losses relevant to a specific set of inputs.\n",
- " | \n",
- " | Arguments:\n",
- " | inputs: Input tensor or list/tuple of input tensors.\n",
- " | \n",
- " | Returns:\n",
- " | List of loss tensors of the layer that depend on `inputs`.\n",
- " | \n",
- " | get_output_at(self, node_index)\n",
- " | Retrieves the output tensor(s) of a layer at a given node.\n",
- " | \n",
- " | Arguments:\n",
- " | node_index: Integer, index of the node\n",
- " | from which to retrieve the attribute.\n",
- " | E.g. `node_index=0` will correspond to the\n",
- " | first time the layer was called.\n",
- " | \n",
- " | Returns:\n",
- " | A tensor (or list of tensors if the layer has multiple outputs).\n",
- " | \n",
- " | Raises:\n",
- " | RuntimeError: If called in Eager mode.\n",
- " | \n",
- " | get_output_mask_at(self, node_index)\n",
- " | Retrieves the output mask tensor(s) of a layer at a given node.\n",
- " | \n",
- " | Arguments:\n",
- " | node_index: Integer, index of the node\n",
- " | from which to retrieve the attribute.\n",
- " | E.g. `node_index=0` will correspond to the\n",
- " | first time the layer was called.\n",
- " | \n",
- " | Returns:\n",
- " | A mask tensor\n",
- " | (or list of tensors if the layer has multiple outputs).\n",
- " | \n",
- " | get_output_shape_at(self, node_index)\n",
- " | Retrieves the output shape(s) of a layer at a given node.\n",
- " | \n",
- " | Arguments:\n",
- " | node_index: Integer, index of the node\n",
- " | from which to retrieve the attribute.\n",
- " | E.g. `node_index=0` will correspond to the\n",
- " | first time the layer was called.\n",
- " | \n",
- " | Returns:\n",
- " | A shape tuple\n",
- " | (or list of shape tuples if the layer has multiple outputs).\n",
- " | \n",
- " | Raises:\n",
- " | RuntimeError: If called in Eager mode.\n",
- " | \n",
- " | get_updates_for(self, inputs)\n",
- " | Retrieves updates relevant to a specific set of inputs.\n",
- " | \n",
- " | Arguments:\n",
- " | inputs: Input tensor or list/tuple of input tensors.\n",
- " | \n",
- " | Returns:\n",
- " | List of update ops of the layer that depend on `inputs`.\n",
- " | \n",
- " | get_weights(self)\n",
- " | Returns the current weights of the layer.\n",
- " | \n",
- " | The weights of a layer represent the state of the layer. This function\n",
- " | returns both trainable and non-trainable weight values associated with this\n",
- " | layer as a list of Numpy arrays, which can in turn be used to load state\n",
- " | into similarly parameterized layers.\n",
- " | \n",
- " | For example, a Dense layer returns a list of two values-- per-output\n",
- " | weights and the bias value. These can be used to set the weights of another\n",
- " | Dense layer:\n",
- " | \n",
- " | >>> a = tf.keras.layers.Dense(1,\n",
- " | ... kernel_initializer=tf.constant_initializer(1.))\n",
- " | >>> a_out = a(tf.convert_to_tensor([[1., 2., 3.]]))\n",
- " | >>> a.get_weights()\n",
- " | [array([[1.],\n",
- " | [1.],\n",
- " | [1.]], dtype=float32), array([0.], dtype=float32)]\n",
- " | >>> b = tf.keras.layers.Dense(1,\n",
- " | ... kernel_initializer=tf.constant_initializer(2.))\n",
- " | >>> b_out = b(tf.convert_to_tensor([[10., 20., 30.]]))\n",
- " | >>> b.get_weights()\n",
- " | [array([[2.],\n",
- " | [2.],\n",
- " | [2.]], dtype=float32), array([0.], dtype=float32)]\n",
- " | >>> b.set_weights(a.get_weights())\n",
- " | >>> b.get_weights()\n",
- " | [array([[1.],\n",
- " | [1.],\n",
- " | [1.]], dtype=float32), array([0.], dtype=float32)]\n",
- " | \n",
- " | Returns:\n",
- " | Weights values as a list of numpy arrays.\n",
- " | \n",
- " | set_weights(self, weights)\n",
- " | Sets the weights of the layer, from Numpy arrays.\n",
- " | \n",
- " | The weights of a layer represent the state of the layer. This function\n",
- " | sets the weight values from numpy arrays. The weight values should be\n",
- " | passed in the order they are created by the layer. Note that the layer's\n",
- " | weights must be instantiated before calling this function by calling\n",
- " | the layer.\n",
- " | \n",
- " | For example, a Dense layer returns a list of two values-- per-output\n",
- " | weights and the bias value. These can be used to set the weights of another\n",
- " | Dense layer:\n",
- " | \n",
- " | >>> a = tf.keras.layers.Dense(1,\n",
- " | ... kernel_initializer=tf.constant_initializer(1.))\n",
- " | >>> a_out = a(tf.convert_to_tensor([[1., 2., 3.]]))\n",
- " | >>> a.get_weights()\n",
- " | [array([[1.],\n",
- " | [1.],\n",
- " | [1.]], dtype=float32), array([0.], dtype=float32)]\n",
- " | >>> b = tf.keras.layers.Dense(1,\n",
- " | ... kernel_initializer=tf.constant_initializer(2.))\n",
- " | >>> b_out = b(tf.convert_to_tensor([[10., 20., 30.]]))\n",
- " | >>> b.get_weights()\n",
- " | [array([[2.],\n",
- " | [2.],\n",
- " | [2.]], dtype=float32), array([0.], dtype=float32)]\n",
- " | >>> b.set_weights(a.get_weights())\n",
- " | >>> b.get_weights()\n",
- " | [array([[1.],\n",
- " | [1.],\n",
- " | [1.]], dtype=float32), array([0.], dtype=float32)]\n",
- " | \n",
- " | Arguments:\n",
- " | weights: a list of Numpy arrays. The number\n",
- " | of arrays and their shape must match\n",
- " | number of the dimensions of the weights\n",
- " | of the layer (i.e. it should match the\n",
- " | output of `get_weights`).\n",
- " | \n",
- " | Raises:\n",
- " | ValueError: If the provided weights list does not match the\n",
- " | layer's specifications.\n",
- " | \n",
- " | ----------------------------------------------------------------------\n",
- " | Class methods inherited from tensorflow.python.keras.engine.base_layer.Layer:\n",
- " | \n",
- " | from_config(config) from builtins.type\n",
- " | Creates a layer from its config.\n",
- " | \n",
- " | This method is the reverse of `get_config`,\n",
- " | capable of instantiating the same layer from the config\n",
- " | dictionary. It does not handle layer connectivity\n",
- " | (handled by Network), nor weights (handled by `set_weights`).\n",
- " | \n",
- " | Arguments:\n",
- " | config: A Python dictionary, typically the\n",
- " | output of get_config.\n",
- " | \n",
- " | Returns:\n",
- " | A layer instance.\n",
- " | \n",
- " | ----------------------------------------------------------------------\n",
- " | Data descriptors inherited from tensorflow.python.keras.engine.base_layer.Layer:\n",
- " | \n",
- " | activity_regularizer\n",
- " | Optional regularizer function for the output of this layer.\n",
- " | \n",
- " | dtype\n",
- " | Dtype used by the weights of the layer, set in the constructor.\n",
- " | \n",
- " | dynamic\n",
- " | Whether the layer is dynamic (eager-only); set in the constructor.\n",
- " | \n",
- " | inbound_nodes\n",
- " | Deprecated, do NOT use! Only for compatibility with external Keras.\n",
- " | \n",
- " | input\n",
- " | Retrieves the input tensor(s) of a layer.\n",
- " | \n",
- " | Only applicable if the layer has exactly one input,\n",
- " | i.e. if it is connected to one incoming layer.\n",
- " | \n",
- " | Returns:\n",
- " | Input tensor or list of input tensors.\n",
- " | \n",
- " | Raises:\n",
- " | RuntimeError: If called in Eager mode.\n",
- " | AttributeError: If no inbound nodes are found.\n",
- " | \n",
- " | input_mask\n",
- " | Retrieves the input mask tensor(s) of a layer.\n",
- " | \n",
- " | Only applicable if the layer has exactly one inbound node,\n",
- " | i.e. if it is connected to one incoming layer.\n",
- " | \n",
- " | Returns:\n",
- " | Input mask tensor (potentially None) or list of input\n",
- " | mask tensors.\n",
- " | \n",
- " | Raises:\n",
- " | AttributeError: if the layer is connected to\n",
- " | more than one incoming layers.\n",
- " | \n",
- " | input_shape\n",
- " | Retrieves the input shape(s) of a layer.\n",
- " | \n",
- " | Only applicable if the layer has exactly one input,\n",
- " | i.e. if it is connected to one incoming layer, or if all inputs\n",
- " | have the same shape.\n",
- " | \n",
- " | Returns:\n",
- " | Input shape, as an integer shape tuple\n",
- " | (or list of shape tuples, one tuple per input tensor).\n",
- " | \n",
- " | Raises:\n",
- " | AttributeError: if the layer has no defined input_shape.\n",
- " | RuntimeError: if called in Eager mode.\n",
- " | \n",
- " | input_spec\n",
- " | `InputSpec` instance(s) describing the input format for this layer.\n",
- " | \n",
- " | When you create a layer subclass, you can set `self.input_spec` to enable\n",
- " | the layer to run input compatibility checks when it is called.\n",
- " | Consider a `Conv2D` layer: it can only be called on a single input tensor\n",
- " | of rank 4. As such, you can set, in `__init__()`:\n",
- " | \n",
- " | ```python\n",
- " | self.input_spec = tf.keras.layers.InputSpec(ndim=4)\n",
- " | ```\n",
- " | \n",
- " | Now, if you try to call the layer on an input that isn't rank 4\n",
- " | (for instance, an input of shape `(2,)`, it will raise a nicely-formatted\n",
- " | error:\n",
- " | \n",
- " | ```\n",
- " | ValueError: Input 0 of layer conv2d is incompatible with the layer:\n",
- " | expected ndim=4, found ndim=1. Full shape received: [2]\n",
- " | ```\n",
- " | \n",
- " | Input checks that can be specified via `input_spec` include:\n",
- " | - Structure (e.g. a single input, a list of 2 inputs, etc)\n",
- " | - Shape\n",
- " | - Rank (ndim)\n",
- " | - Dtype\n",
- " | \n",
- " | For more information, see `tf.keras.layers.InputSpec`.\n",
- " | \n",
- " | Returns:\n",
- " | A `tf.keras.layers.InputSpec` instance, or nested structure thereof.\n",
- " | \n",
- " | losses\n",
- " | Losses which are associated with this `Layer`.\n",
- " | \n",
- " | Variable regularization tensors are created when this property is accessed,\n",
- " | so it is eager safe: accessing `losses` under a `tf.GradientTape` will\n",
- " | propagate gradients back to the corresponding variables.\n",
- " | \n",
- " | Returns:\n",
- " | A list of tensors.\n",
- " | \n",
- " | metrics\n",
- " | List of `tf.keras.metrics.Metric` instances tracked by the layer.\n",
- " | \n",
- " | name\n",
- " | Name of the layer (string), set in the constructor.\n",
- " | \n",
- " | non_trainable_variables\n",
- " | \n",
- " | non_trainable_weights\n",
- " | List of all non-trainable weights tracked by this layer.\n",
- " | \n",
- " | Non-trainable weights are *not* updated during training. They are expected\n",
- " | to be updated manually in `call()`.\n",
- " | \n",
- " | Returns:\n",
- " | A list of non-trainable variables.\n",
- " | \n",
- " | outbound_nodes\n",
- " | Deprecated, do NOT use! Only for compatibility with external Keras.\n",
- " | \n",
- " | output\n",
- " | Retrieves the output tensor(s) of a layer.\n",
- " | \n",
- " | Only applicable if the layer has exactly one output,\n",
- " | i.e. if it is connected to one incoming layer.\n",
- " | \n",
- " | Returns:\n",
- " | Output tensor or list of output tensors.\n",
- " | \n",
- " | Raises:\n",
- " | AttributeError: if the layer is connected to more than one incoming\n",
- " | layers.\n",
- " | RuntimeError: if called in Eager mode.\n",
- " | \n",
- " | output_mask\n",
- " | Retrieves the output mask tensor(s) of a layer.\n",
- " | \n",
- " | Only applicable if the layer has exactly one inbound node,\n",
- " | i.e. if it is connected to one incoming layer.\n",
- " | \n",
- " | Returns:\n",
- " | Output mask tensor (potentially None) or list of output\n",
- " | mask tensors.\n",
- " | \n",
- " | Raises:\n",
- " | AttributeError: if the layer is connected to\n",
- " | more than one incoming layers.\n",
- " | \n",
- " | output_shape\n",
- " | Retrieves the output shape(s) of a layer.\n",
- " | \n",
- " | Only applicable if the layer has one output,\n",
- " | or if all outputs have the same shape.\n",
- " | \n",
- " | Returns:\n",
- " | Output shape, as an integer shape tuple\n",
- " | (or list of shape tuples, one tuple per output tensor).\n",
- " | \n",
- " | Raises:\n",
- " | AttributeError: if the layer has no defined output shape.\n",
- " | RuntimeError: if called in Eager mode.\n",
- " | \n",
- " | stateful\n",
- " | \n",
- " | trainable\n",
- " | \n",
- " | trainable_variables\n",
- " | Sequence of trainable variables owned by this module and its submodules.\n",
- " | \n",
- " | Note: this method uses reflection to find variables on the current instance\n",
- " | and submodules. For performance reasons you may wish to cache the result\n",
- " | of calling this method if you don't expect the return value to change.\n",
- " | \n",
- " | Returns:\n",
- " | A sequence of variables for the current module (sorted by attribute\n",
- " | name) followed by variables from all submodules recursively (breadth\n",
- " | first).\n",
- " | \n",
- " | trainable_weights\n",
- " | List of all trainable weights tracked by this layer.\n",
- " | \n",
- " | Trainable weights are updated via gradient descent during training.\n",
- " | \n",
- " | Returns:\n",
- " | A list of trainable variables.\n",
- " | \n",
- " | updates\n",
- " | \n",
- " | variables\n",
- " | Returns the list of all layer variables/weights.\n",
- " | \n",
- " | Alias of `self.weights`.\n",
- " | \n",
- " | Returns:\n",
- " | A list of variables.\n",
- " | \n",
- " | weights\n",
- " | Returns the list of all layer variables/weights.\n",
- " | \n",
- " | Returns:\n",
- " | A list of variables.\n",
- " | \n",
- " | ----------------------------------------------------------------------\n",
- " | Class methods inherited from tensorflow.python.module.module.Module:\n",
- " | \n",
- " | with_name_scope(method) from builtins.type\n",
- " | Decorator to automatically enter the module name scope.\n",
- " | \n",
- " | >>> class MyModule(tf.Module):\n",
- " | ... @tf.Module.with_name_scope\n",
- " | ... def __call__(self, x):\n",
- " | ... if not hasattr(self, 'w'):\n",
- " | ... self.w = tf.Variable(tf.random.normal([x.shape[1], 3]))\n",
- " | ... return tf.matmul(x, self.w)\n",
- " | \n",
- " | Using the above module would produce `tf.Variable`s and `tf.Tensor`s whose\n",
- " | names included the module name:\n",
- " | \n",
- " | >>> mod = MyModule()\n",
- " | >>> mod(tf.ones([1, 2]))\n",
- " | <tf.Tensor: shape=(1, 3), dtype=float32, numpy=..., dtype=float32)>\n",
- " | >>> mod.w\n",
- " | <tf.Variable 'my_module/Variable:0' shape=(2, 3) dtype=float32,\n",
- " | numpy=..., dtype=float32)>\n",
- " | \n",
- " | Args:\n",
- " | method: The method to wrap.\n",
- " | \n",
- " | Returns:\n",
- " | The original method wrapped such that it enters the module's name scope.\n",
- " | \n",
- " | ----------------------------------------------------------------------\n",
- " | Data descriptors inherited from tensorflow.python.module.module.Module:\n",
- " | \n",
- " | name_scope\n",
- " | Returns a `tf.name_scope` instance for this class.\n",
- " | \n",
- " | submodules\n",
- " | Sequence of all sub-modules.\n",
- " | \n",
- " | Submodules are modules which are properties of this module, or found as\n",
- " | properties of modules which are properties of this module (and so on).\n",
- " | \n",
- " | >>> a = tf.Module()\n",
- " | >>> b = tf.Module()\n",
- " | >>> c = tf.Module()\n",
- " | >>> a.b = b\n",
- " | >>> b.c = c\n",
- " | >>> list(a.submodules) == [b, c]\n",
- " | True\n",
- " | >>> list(b.submodules) == [c]\n",
- " | True\n",
- " | >>> list(c.submodules) == []\n",
- " | True\n",
- " | \n",
- " | Returns:\n",
- " | A sequence of all submodules.\n",
- " | \n",
- " | ----------------------------------------------------------------------\n",
- " | Data descriptors inherited from tensorflow.python.training.tracking.base.Trackable:\n",
- " | \n",
- " | __dict__\n",
- " | dictionary for instance variables (if defined)\n",
- " | \n",
- " | __weakref__\n",
- " | list of weak references to the object (if defined)\n",
- " | \n",
- " | ----------------------------------------------------------------------\n",
- " | Static methods inherited from tensorflow.python.keras.utils.version_utils.LayerVersionSelector:\n",
- " | \n",
- " | __new__(cls, *args, **kwargs)\n",
- " | Create and return a new object. See help(type) for accurate signature.\n",
- "\n"
- ]
- }
- ],
- "source": [
- "help(keras.layers.Dense)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 6,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Model: \"sequential\"\n",
- "_________________________________________________________________\n",
- "Layer (type) Output Shape Param # \n",
- "=================================================================\n",
- "flatten (Flatten) (None, 784) 0 \n",
- "_________________________________________________________________\n",
- "dense (Dense) (None, 100) 78500 \n",
- "_________________________________________________________________\n",
- "dense_1 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_2 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_3 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_4 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_5 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_6 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_7 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_8 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_9 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_10 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_11 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_12 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_13 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_14 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_15 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_16 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_17 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_18 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_19 (Dense) (None, 100) 10100 \n",
- "_________________________________________________________________\n",
- "dense_20 (Dense) (None, 10) 1010 \n",
- "=================================================================\n",
- "Total params: 271,410\n",
- "Trainable params: 271,410\n",
- "Non-trainable params: 0\n",
- "_________________________________________________________________\n"
- ]
- }
- ],
- "source": [
- "model.summary()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 7,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "22"
- ]
- },
- "execution_count": 7,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "len(model.layers)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 8,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Epoch 1/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.6965 - accuracy: 0.7549 - val_loss: 0.4953 - val_accuracy: 0.8294\n",
- "Epoch 2/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.4751 - accuracy: 0.8291 - val_loss: 0.4481 - val_accuracy: 0.8398\n",
- "Epoch 3/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.4210 - accuracy: 0.8479 - val_loss: 0.4240 - val_accuracy: 0.8508\n",
- "Epoch 4/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.3905 - accuracy: 0.8593 - val_loss: 0.4034 - val_accuracy: 0.8564\n",
- "Epoch 5/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.3662 - accuracy: 0.8675 - val_loss: 0.3860 - val_accuracy: 0.8622\n",
- "Epoch 6/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.3485 - accuracy: 0.8717 - val_loss: 0.3814 - val_accuracy: 0.8678\n",
- "Epoch 7/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.3338 - accuracy: 0.8782 - val_loss: 0.3728 - val_accuracy: 0.8648\n",
- "Epoch 8/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.3209 - accuracy: 0.8830 - val_loss: 0.3673 - val_accuracy: 0.8696\n",
- "Epoch 9/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.3104 - accuracy: 0.8862 - val_loss: 0.3666 - val_accuracy: 0.8688\n",
- "Epoch 10/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.2995 - accuracy: 0.8899 - val_loss: 0.3679 - val_accuracy: 0.8676\n",
- "Epoch 11/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.2902 - accuracy: 0.8938 - val_loss: 0.3609 - val_accuracy: 0.8708\n",
- "Epoch 12/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.2824 - accuracy: 0.8961 - val_loss: 0.3637 - val_accuracy: 0.8710\n",
- "Epoch 13/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.2740 - accuracy: 0.8995 - val_loss: 0.3622 - val_accuracy: 0.8700\n",
- "Epoch 14/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.2668 - accuracy: 0.9024 - val_loss: 0.3600 - val_accuracy: 0.8764\n",
- "Epoch 15/100\n",
- "1719/1719 [==============================] - 6s 4ms/step - loss: 0.2602 - accuracy: 0.9040 - val_loss: 0.3649 - val_accuracy: 0.8728\n",
- "Epoch 16/100\n",
- "1719/1719 [==============================] - 6s 4ms/step - loss: 0.2538 - accuracy: 0.9075 - val_loss: 0.3515 - val_accuracy: 0.8756\n",
- "Epoch 17/100\n",
- "1719/1719 [==============================] - 6s 4ms/step - loss: 0.2461 - accuracy: 0.9102 - val_loss: 0.3537 - val_accuracy: 0.8734\n",
- "Epoch 18/100\n",
- "1719/1719 [==============================] - 6s 4ms/step - loss: 0.2400 - accuracy: 0.9120 - val_loss: 0.3625 - val_accuracy: 0.8784\n",
- "Epoch 19/100\n",
- "1719/1719 [==============================] - 6s 3ms/step - loss: 0.2358 - accuracy: 0.9144 - val_loss: 0.3545 - val_accuracy: 0.8792\n",
- "Epoch 20/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.2298 - accuracy: 0.9159 - val_loss: 0.3646 - val_accuracy: 0.8754\n",
- "Epoch 21/100\n",
- "1719/1719 [==============================] - 5s 3ms/step - loss: 0.2249 - accuracy: 0.9177 - val_loss: 0.3575 - val_accuracy: 0.8800\n"
- ]
- }
- ],
- "source": [
- "# Tensorboard, earlystopping, ModelCheckpoint\n",
- "logdir = './dnn-selu-callbacks'\n",
- "if not os.path.exists(logdir):\n",
- " os.mkdir(logdir)\n",
- "output_model_file = os.path.join(logdir,\n",
- " \"fashion_mnist_model.h5\")\n",
- "\n",
- "callbacks = [\n",
- " keras.callbacks.TensorBoard(logdir),\n",
- " keras.callbacks.ModelCheckpoint(output_model_file,\n",
- " save_best_only = True),\n",
- " keras.callbacks.EarlyStopping(patience=5, min_delta=1e-3),\n",
- "]\n",
- "history = model.fit(x_train_scaled, y_train, epochs=100,\n",
- " validation_data=(x_valid_scaled, y_valid),\n",
- " callbacks = callbacks)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 9,
- "metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "{'loss': [0.6965022087097168, 0.47506192326545715, 0.4210342466831207, 0.3905491232872009, 0.3661811649799347, 0.34854137897491455, 0.33382126688957214, 0.3208833336830139, 0.31041842699050903, 0.2995002269744873, 0.2901705205440521, 0.2823897898197174, 0.27397868037223816, 0.26680049300193787, 0.2602353096008301, 0.25375181436538696, 0.24609822034835815, 0.23995651304721832, 0.23581187427043915, 0.22981438040733337, 0.22490321099758148], 'accuracy': [0.7548909187316895, 0.8290908932685852, 0.8478727340698242, 0.8592727184295654, 0.8674908876419067, 0.8716909289360046, 0.8781999945640564, 0.8829818367958069, 0.8862181901931763, 0.8898727297782898, 0.8937636613845825, 0.8960727453231812, 0.899472713470459, 0.902436375617981, 0.9040181636810303, 0.9075272679328918, 0.9102181792259216, 0.9120363593101501, 0.9143636226654053, 0.9159454703330994, 0.9176727533340454], 'val_loss': [0.4952741265296936, 0.4480682611465454, 0.42396080493927, 0.40337103605270386, 0.3859851658344269, 0.3813505470752716, 0.37284961342811584, 0.36732274293899536, 0.3666156530380249, 0.3678617775440216, 0.3608609735965729, 0.36372920870780945, 0.3621857464313507, 0.36002227663993835, 0.36489173769950867, 0.3514963984489441, 0.3536582589149475, 0.36246153712272644, 0.3544749319553375, 0.364633172750473, 0.35754767060279846], 'val_accuracy': [0.8294000029563904, 0.8398000001907349, 0.8507999777793884, 0.8564000129699707, 0.8622000217437744, 0.8677999973297119, 0.864799976348877, 0.8695999979972839, 0.8687999844551086, 0.8676000237464905, 0.8708000183105469, 0.8709999918937683, 0.8700000047683716, 0.8763999938964844, 0.8727999925613403, 0.8755999803543091, 0.8733999729156494, 0.8784000277519226, 0.8791999816894531, 0.8754000067710876, 0.8799999952316284]}\n"
- ]
- }
- ],
- "source": [
- "print(history.history)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 10,
- "metadata": {},
- "outputs": [
-                 {
-                     "data": {
-                         "image/png": "[base64-encoded PNG output omitted: matplotlib learning-curve plot of the training history (loss/accuracy vs. epoch); the blob was truncated in the source]"
54L7NVa7wNQSq0AFgM7upT7MfAg8J8DWsMklu6y8Y1zx/L1c8aw/tMann3/IMvfLScYLuScmmyWnfElZi6wsKHyPdYfWs+7h97lzc/eBGB8xnjOyj+LWbmzmJkzk0xHZpxbI4QQ4nj1J4xHAqUx62XAGbEFlFKzgCKt9etKKQnjY6SUYt64bOaNy+ZIo5efv7CW96ta+Oazm8jx2PnS3FO5dc5FPHCOnU/qPuFfh/7Fu+XvsmLXCpbvWA5AcWoxM3NmRqfRqaNRSsW5ZUIIIfpD9XW1IaXUNcBCrfU3IuvXA2dorW+NrJuA1cCNWuv9SqkS4D96+s5YKXUzcDNAbm7u6StWrBiwhjQ3N+N2uwfs/uKpubkZV0oKW6pCrC4NsrXKuG7yaTlmFhRZmJptxqQUAR2g1FfKPt++6NQSbgHAbXIz1j6WsY6xjLWPpchWhEUN/vF6yfa8JENbkqUdIG1JRMnSDhj4tixYsOBDrfXsnvb1J4zPAu7TWl8aWf8+gNb6Z5H1NOBToDlykzygFlh0tIO4Zs+erTduHLhjvEpKSpg/f/6A3V88dW1LaW0rz39wkJUbS6lu9lOU6eS6uaP54uxCstz2aDmtNZ81fsZHlR+x6cgmPjryEaVNxqCG3WxnavZUZuUYw9ozcmaQaksd9LYMZcnSlmRpB0hbElGytAMGvi1KqV7DuD9dpQ3AeKXUGKAc+BJwXftOrXUDkB3zYCX00jMWx6co08WdCyfynYsm8Pb2Cp59/wAPvrWLX/5tN2eNzeKyaXlcMjmPER47Y9PGMjZtLFdPuBqA6rZqPjrykTFVfsRT254ipEMoFOMyxjFzxExm5s5kVs4s8lPyZWhbCCHioM8w1loHlVK3Am9j/LTpKa31dqXU/cBGrfWrJ7uSwmCzmPjcjAI+N6OAvUeaePmjct7cWsHdL2/jh69sY05xJpdPy+fSKXnkpTkA48pTF4++mItHXwxAa6CVbdXboj3n1z97nZV7VkbLjnCOwGPzdJtSbal4bB7cVnendY/NQ4o1RX52JYQQJ6BfXyJqrd8A3uiy7Ue9lJ1/4tUSfRmX4+E/L53If1xyKnsqm3lj62He3HaYe1/dzr2vbuf00RlcNjWPhVPzKMzouHKUy+pibv5c5ubPBSAUDvFJ/Sd8dOQjtlVvo8HXQJO/iYNNB2nyN9Hkb6Il0HLUuigUbpu7U0C3h3ZrXSuHdx0mz5VHXkoe+Sn5pNnTpAcuhBAx5AxcQ5xSilPzPJya5+G7F09g75Fm3tp2mDe3VfDA6zt54PWdTC9M47Kp+Vw2NY/i7M5n9TKbzEzMnMjEzIm9PkYwHKQl0EKjvzEa0J2mQMdye5ny5nIa/Y1UtVSx6v1Vne7PYXaQl5JHbkpuNKSjU2TdbUuOA0CEEKI/JIyTzLgcN7deMJ5bLxjPgZoW3txWwZvbKnjwrV08+NYuJuWncvnUPC6blse4HE+/7tNispBmTyPNnnbM9Vm9ZjXTz5xORUtFdDrccthYbq1g/eH1VLdVE9bhTrfzWD1GWHcJ6RGuEaTb06OT0+KUXrYQYsiTME5io7NSuOX8U7jl/FMor2/jrW0VvLn1ML96Zw+/emcP43PcXDY1j8um5TMxz3NSQs2kTGQ7s8l2ZjM1e2qPZQLhAFWtVR2B3VrRKby3V2+nzlfX422tJivp9nTS7GnRgO5x2dGxnGpLxWKSf30hROKQd6RhYmS6k6+fM4avnzOGykYvb2+v4I2th/n9mr08vHovxVkuFk7N5+xxWcwclYHbPnj/GlaTlQJ3AQXugl7LeINeKlsrqWqtosHXQL2vngZ/ZO5roN5bT72vns8aPotuC+pgr/fnsXpIs6eRak/FoiwopTArc6e5CRMmkwkTJszKTE1NDa+seQWTMnWazMqMQkXXwzpMSIcIhAOEwiFCOkQoHCKogwTDwe7rkTLBcM/7HRZH9ANNtjObLEdWp/X2yWV19dpeMTBC4RDNgWZ8IR/Zzmw5cDGBBEIBlFJD9oP20Ky1OCG5qQ6+elYxXz2rmOpmH3/bXsmb2w7zh3/u47F/fIrZpJicn8rs4gzmFmcyuziTER5733d8EjksDkanjmZ06uh+ldda0xJo6QhrX310ioZ55GC19vDUWkdDUaM7bdNa0xBsoK2xrWMbmlDYmMfeh0mZsJgsmJXZmJvMWFTndavJisPkiG5rL99eNna9LdhGdVs1Va1V7KzZSa23lpAOdWuz0+LsNbSznB3L3rCXWm8t3qAXb9BLW6gtuuwNeTsttwXb8IV8RrlgW+cykbkv5Iu22WqyRttjNVmNdWXBao5sb1+OtLHTcsxtbWYbNpMNu9mO1WzFZjaWbSabsS+yvylkHKtgN9uxmqz9Gt3xh/w0+huNydcYPc6hx/X25cj25kBz9H5cFhfjM8YzIWNCdBqfMR6PrX9f/yQ6X8hHZUslh1oOcbj5ME3+Jka4RpDjyiHHlUOuKxeb2TaodWr0N1LaVEppUyllTWWUNZVF1ytaKgDIcGT0+GG16+S2uhPqKy4J42Eu223nujNGcd0Zo2j2Bdl0oI6N+2v5YH8tz39wkKf/tR+AMdkpzB6dwZziTOaMyaQ4y5VQ/8hdKWUc4e22uQfsOtGJcjKDUDhEva+e6rZqatpqqPZWU93WMdW01fBp/ae83/Y+jf7Gnu/kL8f2mE6LE4fZgd1ix2F2GOsWBy6Liwx7RkfPXgfxBX20hFsI6iCBUCDaw29fDoQDxnpkfsKe71jsFNaRwLaZbWito8HqDXn7bGv7z/dSbankufKYkDEh+muB9q85Pmv4jD11e3hr/1u8sOeF6O0LUgqiwTwh0wjpUZ5RCdVj01pT56szjt9orjACN3Isx+HmwxxuOUyNt6bP+8mwZxjBnJIbDei65jqs5VZyXbnkpOTgsfb/K7CwDnOk9Ug0bGPnpc2lNPgaOpXPdGRS6ClkVu4sCt2FKKWM10Gr8VrY17CP6rbqHv/PHGZHpw+psdMI5wiyndnUBXv+euxkSJz/DhF3bruF8yaM4LwJIwDwB8NsO9RghPNndbyzs5IXPiwDjBCfU5zB7OJM5hRnMDk/FYtZhuwGg9lkJsuZRZYzq8+y/pDfCOz2sPZW8/HOj5k8YXI0UB1mBw6LEbB2sz263L7dbraftA9eWutoWMcGtD/kxx/2EwgF8IV8xnpkmz/kxxfyEQgH2LZrG6PHjiYQ7lIuUtYX8hEIBQBItad2CtRUWyqp9i7rtlSsZusxt6GytZI9dXui0yd1n/DP8n9GRzDsZjunpJ/SrRc90Bd4CetwtO313noOtxyOThUtFRxqPhQ9iNIX8nW6rcPsIN+dT35KPqdmnhr9KWKBu4C8lDxSbalUt1VT2VJJZWslR1qPROdHWo+wrXobtd5aAJ5b9Vz0fp0WpxHMkbBu71lnOjOpaavp1Lstby7vVC+zMpOfkk+Rp4hLR19KoaeQIk8RRZ4iCj2F/brme1iHafQ1Rv//q1qroq+JqjZjeX/DfjZUbOj24dWhHHyez5/IU9JvEsaiVzaLiVmjMpg1KoObz4Nw
WPNpVTMb9texYX8tG/bX8uY2Y2jIZTMza1Sk51ycwWmj0nHZ5N8r3mxmm/EG686PbhtxaATzJ82PX6ViKKWwKmM4+3hklmcyf8r8ga3UMVJKRY/6P6/wvOh2f8jPvoZ9RkDXGiG9rnwdr+x9JVom25ltBHP6eCrqK/hw44edPnS0f/CIXe/pw0Z7maONNGQ7s8lPyWd8xnjOLzyffHd+NHDzU/JJt6f3+aErzZ7GKemn9LrfH/Lz6upXGTt9bDSso8HdUsmHlR9ypPVIp+M5XBYXRZ4ixqSN4bzC86JBW+QuIs+dd9z/G+1MykS6wziIcxzjjlo29sNrVVsVm7ZuOqHHPhbybin6zWRSjM/1MD7Xw3VnjALgcEObEc6fGeH827/vQWuwmBRTRqYxQvmo9pQxdWQq40a4pfcshg2b2dbxG/6Y/Kppq+GT+k+iAb2nbg/P73qeQDiAvdnebYg9dtlpcZJuT8dmtmE1WY3v0Xsp77F5jAMjUwrITRmc73dtZhvZ1mxm5c7qtUxYh6n11lLTVkO2M5tMR2bCfOXV9cOrad/gvV9JGIsTkp/mZNEMJ4tmGEdCN7QG2HSwjg/217Jxfy1rS4OseuFjAOwWE5PyU5k2Mo2pI1OZOjKN8TkebBYJaDF8tH/FcGb+mdFtWmtKSkpYsGBBHGs2OGJ/7ig6SBiLAZXmsrJgYg4LJuYAsHrNGoomz2bboQa2ljWy7VADL39Uzp/eOwCAzWxiYr6HKQVp0ZA+Nc+D3WKOZzOEGFRKqYTpHYr4kDAWJ5VJdQxtf36msS0c1uyvaWHboUa2lTewrbyB17cc4vkPDgLGEPeEXE+nHvSk/FQcVgloIURykjAWg85kUowd4WbsCHd0eFtrTWltG1vLG9h2yAjov+2o4C8bjesxm02K8TluJucbPecJeR4m5nnIS3VIj0IIMeRJGIuEoJRiVJaLUVkurphuHDyhteZQg5etZQ1sP9TA1vIG/vVpNf/7UXn0dqkOixHOucbFMk6NzNNdg3syAiGEOBESxiJhKaUYme5kZLqThVPzotvrW/3srmhiT2UTuyub2F3RxKsfH6Lp/Y6fS+Sm2o2Abg/pPA/jczw4bTLULYRIPBLGYshJd9k4Y2wWZ4ztOOmF1pqKRm80pHdF5n967wC+oHFFKKVgdKaLCbnGEPeESI96VKZLvo8WQsSVhLFICkop8tOc5Kc5mX9qTnR7KKw5UNNi9KIrmtld2cjuiiZW7awkrNtvCwVpTkZluijOdjE6K4XiLGM+OsslJy8RQpx08i4jkpo55mCxhTFXcPQGQnxa1cwnlc0cqGnlQE0L+2ta+Nv2Smpa/J3uI8djpzgrBXvAx3a91wjtrBRGZ7tIdZzY2YGEEAIkjMUw5bCamVKQxpSCtG77Gr0BDta0sr+mJSaoW9laHeKfb+/uVDYzxcborEg4x8zHZrtJc0lQCyH6R8JYiC5SHVamjkxj6sjOQV1SUsLceedwsLaV/dUdIX2gpoUPPqvllc3laN1RPsNlZUx2CsXZKYyNzIuzUhiTnULKIF4vWgiR+OQdQYhj4LJZmJiXysS81G77vIEQZXWtfFbdyv7qFvZVt7C/uoV399bwv5vKO5XN8dgZk50SDev2ZTmYTIjhScJYiAHisJoZl+NhXE73i8u3+oPsrzaGvj+r7pje2dH5O+r2g8lig7o4y0VhhouRGU7c0qMWIinJK1uIQeCyWZhckMrkgu496oa2APurjQPI9lW1RAP7lc3lNHk7XxIvw2WlMMNFYYYzMrmicwlrIYYueeUKEWdpTiszitKZUZTeabvWmpoWP6W1rZTVtUUmY3lPZROrdx2J/oa6nYS1EEOTvDKFSFBKKbLddrLddmaOyui2X2tNdbM/GtD9DetUS4jxBzaQk+ogLzLlpNrJSzOW05xWOd+3EINMwliIIUopxQiPnRGe/oV1eb0R1ls+Lae83sumg/XUdvlNNRjXnc6NDelUB3lpjm7hLQeaCTFwJIyFSFK9hXVJSQ3z558LgC8Y4kijj8pGL5WNPioavVQ2eqloMObbyhtYtbMSbyDc7f7TXdZIMDsoSHNEzoDmID/dWC5Id8jZy4ToJ3mlCDGM2S1mijJdFGW6ei2jtabRG+wU0pWNXioavVQ0GEG+41Aj1c2+brdNc1qNgE5zkJ/u7AjtdAcFaU7y0hzSwxYCCWMhRB+UUqQ5raQ5rUzI7f6zrXa+YIjKBh+HGto43NDG4QYvh+u9HG5o41C9l82l9dS1BrrdLjPFFglsozedn+YkN9VOjsfBCI+dHI+ddJd8jy2Sm4SxEGJA2C3m6DWpe+MNhCIh3cahmHlFg/F99gef1dDY5edcAFazYoTbzohUByPcdnJSjZCuOxQgsKMyGtrZbjs2i+lkNlOIk0LCWAgxaBxWc/SEJr1p8RlD4lVNPo5EJmPZ2FZW18qmg3XRg8+e2bGx0+0zXNZOveoRqXZGRI5Kz3bbyXLbyHbbyUyxYTZJb1skBgljIURCSbFbolfaOppAKMz/vVPCKVNmxQR3R4hXNfn4rLqFqiYf/lD3A9CUgkyXLRrOWW472ZHlbLeNrBQ72Z6ObfLdtjiZJIyFEEOS1Wwi02HqdrKUrrTWNLQFqG72U9PsM+YtPqqbfFS3+Klu8lHT4mdLWT01zX6afd2HyQHcdktHcKfYjKBOsZEV6W1npdgZ4THmaU4rJul1i2MgYSyESGpKKdJdNtJdNsblHL23DdDmDxlh3dwe1JHl9iBv9rG/poUPD9RR2+rvdKWudmaTIjMltpfdEdqxPe8s6XWLiIQK40AgQFlZGV6v95hvm5aWxs6dO09CrQZforTF4XBQWFiI1SrX5RXDh9NmptBmXJyjL6Gwpq7VT000rH3URHve/miQ769pobrJT1sg1OP9uO0W3OYQxXvWx5xwxUFuqj26PsIjoZ3MEiqMy8rK8Hg8FBcXH/PPGJqamvB4ev/ZxVCSCG3RWlNTU0NZWRljxoyJa12ESFRmU8cpS0+l79dsqz8YDe7ovMVPVZOP7ftKCYY0mw7WUdnowx/s+UQruR4HuWkOcj1GUHdaTnWQ7bZhMcsR5UNNQoWx1+s9riAWA08pRVZWFlVVVfGuihBJw2Wz4Mq09HiSlZKSKubPnwcYH4brWwNUNhlnRqts9HIkcqKVykYfRxq97KlooqrZRyjceZzcpCDbbZx5LTPFGCLPjAyJZ6bYYrYZQ+WpTou85yaAhApjQP4pEog8F0LEh1KKjBQbGSk2Jub1Xi4U1tQ0+6KBXdnkpbLBCO2aZj/VLcYQeW2znxZ/z0PkFpPxWNGAjhyg1jW4M1NseBxW3A4LLqtZDlAbYAkXxvHmdrtpbm6OdzWEEKJPZpMiJ/L98jTSjlrWGwhR2+KntsVPTYuf2hZjqDx2W02zj61l9dS0+LtdSzuWUuC2WXA7LHgcFuM7b4cVj91CU52Pdc07cEe2G/ut0bIeuyW6z22XXnk7CWMhhBgGHFYzBelOCtKd/SrvD4ajB6fVtvipazV+9tXsDdLkDdAUWW72GVNDW4DyulZ
qGkN8eORgrz3xWFazIsPV3iO3keEyeuLtPfWMmOH0jBQrma7k/T5cwrgXWmvuvPNO3nzzTZRS3HPPPSxdupTDhw+zdOlSGhsbCQaDPProo8ybN4+vf/3rbNy4EaUUX/va1/jud78b7yYIIcRxs0UupZmb6jim25WUlDB//nxCYR0NaiO0AzRFwrspEuh1rQFqm41eeV2rn+2HGqlt8dPQ1v0c5u1SHRayImdQ6xreaS5r9DzqsZPLZk74HnjChvH/93/b2XGosd/lQ6EQZvPRD/ufXJDKvZ+b0q/7+9///V82b97Mxx9/THV1NXPmzOG8887jueee49JLL+Xuu+8mFArR2trK5s2bKS8vZ9u2bQDU19f3u95CCJGMzKaOC4wcq0DI6JXXtQSoafFR1xIwhtVb/NS1dIR3WV0rW8rqqWv1Ewj18IPvCItJkRqpS2pMSKc6LD2Gd3uZlkDv9znQEjaM423dunVce+21mM1mcnNzOf/889mwYQNz5szha1/7GoFAgKuuuorTTjuNsWPHsm/fPm677TauuOIKLrnkknhXXwghhiyr2USOx0GOxwH9+MmY1pomX5CG1gANbR1TY1vn9Ya2AI1eY0i9tLY1uq3rEentHGa44uIBblwvEjaM+9uDbTdYv80977zzWLt2La+//jo33ngjd9xxB1/96lf5+OOPefvtt3nsscdYuXIlTz311EmvixBCCOPo81SHlVSHlaJjvK3WmhZ/qMfg3rFz10mpb08SNozj7dxzz+Xxxx/nhhtuoLa2lrVr1/LQQw9x4MABCgsLuemmm/D5fGzatInLL78cm83G1VdfzamnnspXvvKVeFdfCCFEPyilokd2dz24raT500Grh4RxLz7/+c+zfv16ZsyYgVKKX/ziF+Tl5fHMM8/w0EMPYbVacbvdLF++nPLycpYtW0Y4bJwx52c/+1mcay+EEGIo6VcYK6UWAr8DzMCTWuufd9l/B/ANIAhUAV/TWh8Y4LoOivbfGCuleOihh3jooYc67b/hhhu44YYbut1u06ZNg1I/IYQQyafPH2wppczAI8BlwGTgWqXU5C7FPgJma62nAy8CvxjoigohhBDJqj+/np4L7NVa79Na+4EVwOLYAlrrNVrr1sjqe0DhwFZTCCGESF5K93QxztgCSl0DLNRafyOyfj1whtb61l7K/x6o0Fo/0MO+m4GbAXJzc09fsWJFp/1paWmMGzfueNrRr98ZDxWJ1Ja9e/fS0NBw3Ldvbm7G7e77GrJDQbK0JVnaAdKWRJQs7YCBb8uCBQs+1FrP7mnfgB7ApZT6CjAbOL+n/VrrJ4AnAGbPnq3nz5/faf/OnTuP++dJiXDZwYGSSG1xOBzMnDnzuG/ffjaeZJAsbUmWdoC0JRElSztgcNvSnzAuh04/3SqMbOtEKXURcDdwvtbaNzDVE0IIIZJff74z3gCMV0qNUUrZgC8Br8YWUErNBB4HFmmtjwx8NYUQQojk1WcYa62DwK3A28BOYKXWertS6n6l1KJIsYcAN/CCUmqzUurVXu5OCCGEEF306ztjrfUbwBtdtv0oZvmiAa5X0gsGg1gscs4VIYQQ/RumHnauuuoqTj/9dKZMmcITTzwBwFtvvcWsWbOYMWMGF154IWAcabds2TKmTZvG9OnTeemllwA6HX334osvcuONNwJw4403csstt3DGGWdw55138sEHH3DWWWcxc+ZM5s2bx+7duwHjaOr/+I//YOrUqUyfPp3//u//ZvXq1Vx11VXR+33nnXf4/Oc/Pwh/DSGEECdb4nbN3rwLKrb2u7gzFARzH83JmwaX/fzoZYCnnnqKzMxM2tramDNnDosXL+amm25i7dq1jBkzhtraWgB+/OMfk5aWxtatRj3r6ur6vO+ysjLeffddzGYzjY2N/POf/8RisbBq1Sp+8IMf8NJLL/H000+zf/9+Nm/ejMVioba2loyMDL71rW9RVVXFiBEjePrpp/na177W9x9GCCFEwkvcMI6jhx9+mJdffhmA0tJSnnjiCc477zzGjBkDQGZmJgCrVq0i9rfSGRkZfd73kiVLor8hbmho4IYbbuCTTz5BKUUgYFxQu6SkhFtvvTU6jN3+eNdffz1//vOfWbZsGevXr2f58uUD1GIhhBDxlLhh3I8ebKy2AfptbklJCatWrWL9+vW4XC7mz5/Paaedxq5d/b+UllIquuz1ejvtS0lJiS7/8Ic/ZMGCBbz88svs37+/z9+zLVu2jM997nM4HA6WLFki3zkLIUSSkO+Mu2hoaCAjIwOXy8WuXbt477338Hq9rF27ls8++wwgOkx98cUX88gjj0Rv2z5MnZuby86dOwmHw9Eedm+PNXLkSAD++Mc/RrcvWLCAxx9/nGAw2OnxCgoKKCgo4IEHHmDZsmUD12ghhBBxJWHcxcKFCwkGg0yaNIm77rqLM888kxEjRvDEE0/whS98gRkzZrB06VIA7rnnHurq6pg6dSozZsxgzZo1APz85z/nyiuvZN68eeTn5/f6WHfeeSff//73mTlzZjR4wbgy1KhRo5g+fTozZszgueeei+778pe/TFFREZMmTTpJfwEhhBCDTcY5u7Db7bz55ps97rvssss6rbvdbp555plu5a655hquueaabttje78AZ511Fnv27ImuP/CAcTpvi8XCr3/9a3796193u49169Zx00039dkOIYQQQ4eE8RBy+umnk5KSwq9+9at4V0UIIcQAkjAeQj788MN4V0EIIcRJIN8ZCyGEEHEmYSyEEELEmYSxEEIIEWcSxkIIIUScSRgLIYQQcSZhfAJir87U1f79+5k6deog1kYIIcRQJWEshBBCxFnC/s74wQ8eZFdt/y/OEAqFoldD6s3EzIn819z/6nX/XXfdRVFREd/+9rcBuO+++7BYLKxZs4a6ujoCgQAPPPAAixcv7ne9wLhYxDe/+U02btwYPbvWggUL2L59O8uWLcPv9xMOh3nppZcoKCjgmmuuoaKiglAoxA9/+MPo6TeFEEIkp4QN43hYunQp3/nOd6JhvHLlSt5++21uv/12UlNTqa6u5swzz2TRokWdrszUl0ceeQSlFFu3bmXXrl1ccskl7Nmzh8cee4x///d/58tf/jJ+v59QKMQbb7xBfn4+b7/9NmBcTEIIIURyS9gwPloPtidNA3AJxZkzZ3LkyBEOHTpEVVUVGRkZ5OXl8d3vfpe1a9diMpkoLy+nsrKSvLy8ft/vunXruO222wCYOHEio0ePZs+ePZx11ln85Cc/oaysjC984QuMHz+eadOmcccdd/Bf//VfXHnllZx77rkn1CYhhBCJT74z7mLJkiW8+OKL/OUvf2Hp0qU8++yzVFVV8eGHH7J582Zyc3O7XaP4eF133XW8+uqrOJ1OLr/8clavXs2ECRNYu3Yt06ZN45577uH+++8fkMcSQgiRuBK2ZxwvS5cu5aabbqK6upp//OMfrFy5kpycHKxWK2vWrOHAgQPHfJ/nnnsuzz77LBdccAF79uzh4MGDnHrqqezbt4+xY8dy++23c/DgQbZs2cLEiRNxuV
x85StfIT09nSeffPIktFIIIUQikTDuYsqUKTQ1NTFy5Ejy8/P58pe/zOc+9zmmTZvG7NmzmThx4jHf57e+9S2++c1vMm3aNCwWC3/84x+x2+2sXLmSP/3pT1itVvLy8vjBD37Ahg0b+N73vofFYsFqtfLoo4+ehFYKIYRIJBLGPdi6dWt0OTs7m/Xr1/dYrrm5udf7KC4uZtu2bQA4HA6efvrpbmXuuusu7rrrrk7bLr30UubNm3fC338LIYQYOuQ7YyGEECLOpGd8grZu3cr111/faZvdbuf999+PU42EEEIMNRLGJ2jatGls3rw53tUQQggxhMkwtRBCCBFnEsZCCCFEnEkYCyGEEHEmYSyEEELEmYTxCTja9YyFEEKI/pIwTgLBYDDeVRBCCHECEvanTRU//Sm+nf2/nnEwFKK2j+sZ2ydNJO8HP+h1/0Bez7i5uZnFixf3eLvly5fzy1/+EqUU06dP509/+hOVlZXccsst7Nu3j3A4zOOPP05BQQFXXnll9Exev/zlL2lubua+++5j/vz5nHbaaaxbt45rr72WCRMm8MADD+D3+8nKyuLZZ58lNzeX5uZmbrvtNjZu3IhSinvvvZeGhga2bNnCb3/7WwD+8Ic/sGPHDn7zm9/0508thBBigCVsGMfDQF7P2OFw8PLLL3e73Y4dO3jggQd49913yc7Opra2FoDbb7+d888/n5dffpn6+nqUUtTV1R31Mfx+Pxs3bgSgrq6O9957D6UUTz75JL/4xS/41a9+xY9//GPS0tKip/isq6vDarXyk5/8hIceegir1crTTz/N448/fqJ/PiGEEMcpYcP4aD3YniTa9Yy11vzgBz/odrvVq1ezZMkSsrOzAcjMzARg9erVLF++HACz2YzH4+kzjJcuXRpdLisrY+nSpRw+fBi/38+YMWMAWLVqFStWrIiWy8jIAOCCCy7gtddeY9KkSQQCAaZNm3aMfy0hhBADJWHDOF7ar2dcUVHR7XrGVquV4uLifl3P+HhvF8tisRAOh6PrXW+fkpISXb7tttu44447WLRoESUlJdx3331Hve9vfOMb/PSnP2XixIksW7bsmOolhBBiYMkBXF0sXbqUFStW8OKLL7JkyRIaGhqO63rGvd3uggsu4IUXXqCmpgYgOkx94YUXRi+XGAqFaGhoIDc3lyNHjlBTU4PP5+O111476uONHDkSgGeeeSa6/eKLL+aRRx6Jrrf3ts844wxKS0t57rnnuPbaa/v75xFCCHESSBh30dP1jDdu3Mi0adNYvnx5v69n3NvtpkyZwt13383555/PjBkzuOOOOwD43e9+x5o1a5g2bRrnnXceO3bswGq18qMf/Yi5c+dy8cUXH/Wx77vvPpYsWcLpp58eHQIHuOeee6irq2Pq1KnMmDGDNWvWRPd98Ytf5Oyzz44OXQshhIgPGabuwUBcz/hot7vhhhu44YYbOm3Lzc3lr3/9K9D5++/bb7+d22+/vdt9lJSUdFpfvHhxj0d5u93uTj3lWOvWreO73/1ur20QQggxOKRnPAzV19czYcIEnE4nF154YbyrI4QQw570jE/QULyecXp6Onv27Il3NYQQQkRIGJ8guZ6xEEKIE5Vww9Ra63hXQUTIcyGEEIMjocLY4XBQU1MjIZAAtNbU1NTgcDjiXRUhhEh6CTVMXVhYSFlZGVVVVcd8W6/XmzTBkShtcTgcFBYWxrsaQgiR9PoVxkqphcDvADPwpNb6513224HlwOlADbBUa73/WCtjtVqjp3E8ViUlJcycOfO4bptokqktQggh+tbnMLVSygw8AlwGTAauVUpN7lLs60Cd1noc8BvgwYGuqBBCCJGs+vOd8Vxgr9Z6n9baD6wAup5dYjHQfmaJF4ELVV+XNRJCCCEE0L8wHgmUxqyXRbb1WEZrHQQagKyBqKAQQgiR7Ab1AC6l1M3AzZHVZqXU7gG8+2ygegDvL56kLYkpWdqSLO0AaUsiSpZ2wMC3ZXRvO/oTxuVAUcx6YWRbT2XKlFIWIA3jQK5OtNZPAE/04zGPmVJqo9Z69sm478EmbUlMydKWZGkHSFsSUbK0Awa3Lf0Zpt4AjFdKjVFK2YAvAa92KfMq0H7lg2uA1Vp+LCyEEEL0S589Y611UCl1K/A2xk+bntJab1dK3Q9s1Fq/Cvw/4E9Kqb1ALUZgCyGEEKIf+vWdsdb6DeCNLtt+FLPsBZYMbNWO2UkZ/o4TaUtiSpa2JEs7QNqSiJKlHTCIbVEymiyEEELEV0Kdm1oIIYQYjoZcGCulFiqldiul9iql7uphv10p9ZfI/veVUsVxqGaflFJFSqk1SqkdSqntSql/76HMfKVUg1Jqc2T6UU/3lQiUUvuVUlsj9dzYw36llHo48rxsUUrNikc9j0YpdWrM33qzUqpRKfWdLmUS9jlRSj2llDqilNoWsy1TKfWOUuqTyDyjl9veECnziVLqhp7KDKZe2vKQUmpX5P/nZaVUei+3Per/4mDrpS33KaXKY/6PLu/ltkd9vxtMvbTjLzFt2K+U2tzLbRPtOenx/Teurxet9ZCZMA4g+xQYC9iAj4HJXcp8C3gssvwl4C/xrncvbckHZkWWPcCeHtoyH3gt3nXtZ3v2A9lH2X858CaggDOB9+Nd5z7aYwYqgNFD5TkBzgNmAdtitv0CuCuyfBfwYA+3ywT2ReYZkeWMBGzLJYAlsvxgT22J7Dvq/2KCtOU+4D/6uF2f73fxbkeX/b8CfjREnpMe33/j+XoZaj3jpDk1p9b6sNZ6U2S5CdhJ9zObJZPFwHJteA9IV0rlx7tSR3Eh8KnW+kC8K9JfWuu1GL9miBX7engGuKqHm14KvKO1rtVa1wHvAAtPVj37o6e2aK3/po0z/AG8h3HOg4TXy/PSH/15vxs0R2tH5D32i8Dzg1qp43SU99+4vV6GWhgn5ak5I0PpM4H3e9h9llLqY6XUm0qpKYNbs2Oigb8ppT5UxpnWuurPc5dIvkTvbyxD5TkByNVaH44sVwC5PZQZas8NwNcwRlp60tf/YqK4NTLk/lQvw6FD6Xk5F6jUWn/Sy/6EfU66vP/G7fUy1MI46Sil3MBLwHe01o1ddm/CGCadAfw38MogV+9YnKO1noVxda9vK6XOi3eFjpcyTm6zCHihh91D6TnpRBtjbEP+5xNKqbuBIPBsL0WGwv/io8ApwGnAYYwh3qHsWo7eK07I5+Ro77+D/XoZamF8LKfmRB3l1JyJQCllxfhHeFZr/b9d92utG7XWzZHlNwCrUip7kKvZL1rr8sj8CPAyxhBbrP48d4niMmCT1rqy646h9JxEVLZ/HRCZH+mhzJB5bpRSNwJXAl+OvFl204//xbjTWldqrUNa6zDwB3qu45B4XiLvs18A/tJbmUR8Tnp5/43b62WohXHSnJoz8h3L/wN2aq1/3UuZvPbvu5VSczGer4T7YKGUSlFKedqXMQ602dal2KvAV5XhTKAhZjgo0fT6KX+oPCcxYl8PNwB/7aHM28AlSqmMyHDpJZFtCUUptRC4E1iktW7tpUx//hfjrsvxEp+n5
zr25/0uEVwE7NJal/W0MxGfk6O8/8bv9RLvo9qOdcI4KncPxlGGd0e23Y/xAgVwYAwv7gU+AMbGu869tOMcjCGQLcDmyHQ5cAtwS6TMrcB2jKMo3wPmxbvevbRlbKSOH0fq2/68xLZFAY9EnretwOx417uXtqRghGtazLYh8ZxgfIA4DAQwvsf6OsbxEn8HPgFWAZmRsrOBJ2Nu+7XIa2YvsCxB27IX47u69tdL+68mCoA3jva/mIBt+VPkdbAFIwDyu7Ylst7t/S6R2hHZ/sf210dM2UR/Tnp7/43b60XOwCWEEELE2VAbphZCCCGSjoSxEEIIEWcSxkIIIUScSRgLIYQQcSZhLIQQQsSZhLEQQggRZxLGQgghRJxJGAshhBBx9v8DLugEJQqW+OQAAAAASUVORK5CYII=\n",
- "text/plain": [
- "<Figure size 576x360 with 1 Axes>"
- ]
- },
- "metadata": {
- "needs_background": "light"
- },
- "output_type": "display_data"
- }
- ],
- "source": [
- "def plot_learning_curves(history):\n",
- " pd.DataFrame(history.history).plot(figsize=(8, 5))\n",
- " plt.grid(True)\n",
- " plt.gca().set_ylim(0, 1)\n",
- " plt.show()\n",
- "\n",
- "plot_learning_curves(history)\n",
- "\n",
- "# 1. 参数众多,训练不充分\n",
- "# 2. 梯度消失 -> 链式法则 -> 复合函数f(g(x))\n",
- "# selu缓解梯度消失"
- ]
- },
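- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# Hedged sketch added for illustration (not part of the original run):\n",
- "# the SELU remark above can be made concrete by pairing activation=\"selu\"\n",
- "# with lecun_normal initialization on a Dense layer, which is the usual\n",
- "# self-normalizing setup. `keras` is assumed to be the tf.keras module\n",
- "# imported earlier in this notebook.\n",
- "selu_dense = keras.layers.Dense(100, activation=\"selu\",\n",
- "                                kernel_initializer=\"lecun_normal\")"
- ]
- },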
- {
- "cell_type": "code",
- "execution_count": 11,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "[0.3932817578315735, 0.8611999750137329]"
- ]
- },
- "execution_count": 11,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "model.evaluate(x_test_scaled, y_test, verbose=0)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.9"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
- }