
Refine the Keras SavedModel unit tests.

tags/v0.100.5-BERT-load
Yaohui Liu, Haiping · 2 years ago
commit 095bf33d7c
2 changed files with 45 additions and 57 deletions:
  1. test/TensorFlowNET.Keras.UnitTest/SaveModel/SequentialModelLoad.cs (+6, -16)
  2. test/TensorFlowNET.Keras.UnitTest/SaveModel/SequentialModelSave.cs (+39, -41)

test/TensorFlowNET.Keras.UnitTest/SaveModel/SequentialModelLoad.cs (+6, -16)

@@ -1,20 +1,10 @@
 using Microsoft.VisualStudio.TestTools.UnitTesting;
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
 using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-using Tensorflow.Keras.Engine;
-using Tensorflow.Keras.Saving.SavedModel;
-using Tensorflow.Keras.Losses;
-using Tensorflow.Keras.Metrics;
 using Tensorflow;
 using Tensorflow.Keras.Optimizers;
-using static Tensorflow.KerasApi;
-using Tensorflow.NumPy;
 using Tensorflow.Keras.UnitTest.Helpers;
 using static TensorFlowNET.Keras.UnitTest.SaveModel.SequentialModelSave;
+using Tensorflow.NumPy;
+using static Tensorflow.Binding;
 
 namespace TensorFlowNET.Keras.UnitTest.SaveModel;
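The import cleanup works because the two added usings are enough to reach everything the test touches: `using static Tensorflow.Binding;` exposes the `tf` entry point (so `tf.keras.*` replaces the removed `using static Tensorflow.KerasApi;`), and `Tensorflow.NumPy` supplies the `np` helpers. A minimal sketch of the pattern, separate from the commit itself:

    using Tensorflow.NumPy;           // provides np, e.g. np.load(...)
    using static Tensorflow.Binding;  // provides tf, so tf.keras.* is reachable

    // The loss now comes from the tf.keras accessor instead of new LossesApi():
    var loss = tf.keras.losses.SparseCategoricalCrossentropy();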


@@ -24,10 +14,10 @@ public class SequentialModelLoad
     [TestMethod]
     public void SimpleModelFromAutoCompile()
     {
-        var model = keras.models.load_model(@"Assets/simple_model_from_auto_compile");
+        var model = tf.keras.models.load_model(@"Assets/simple_model_from_auto_compile");
         model.summary();
 
-        model.compile(new Adam(0.0001f), new LossesApi().SparseCategoricalCrossentropy(), new string[] { "accuracy" });
+        model.compile(new Adam(0.0001f), tf.keras.losses.SparseCategoricalCrossentropy(), new string[] { "accuracy" });
 
         // check the weights
         var kernel1 = np.load(@"Assets/simple_model_from_auto_compile/kernel1.npy");
@@ -54,10 +44,10 @@ public class SequentialModelLoad
     public void AlexnetFromSequential()
     {
         new SequentialModelSave().AlexnetFromSequential();
-        var model = keras.models.load_model(@"./alexnet_from_sequential");
+        var model = tf.keras.models.load_model(@"./alexnet_from_sequential");
         model.summary();
 
-        model.compile(new Adam(0.001f), new LossesApi().SparseCategoricalCrossentropy(from_logits: true), new string[] { "accuracy" });
+        model.compile(new Adam(0.001f), tf.keras.losses.SparseCategoricalCrossentropy(from_logits: true), new string[] { "accuracy" });
 
         var num_epochs = 1;
         var batch_size = 8;
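Both load tests now follow the same restore-and-recompile flow. A standalone sketch of that flow, assuming a SavedModel already exists at the path the test uses (optimizer settings copied from the diff):

    using Tensorflow.Keras.Optimizers;
    using static Tensorflow.Binding;

    // Restore architecture and weights from the SavedModel directory.
    var model = tf.keras.models.load_model(@"./alexnet_from_sequential");
    model.summary();

    // Recompile before training or evaluating; from_logits: true matches the
    // save side, where the final Dense layer is linear and Softmax is separate.
    model.compile(new Adam(0.001f),
        tf.keras.losses.SparseCategoricalCrossentropy(from_logits: true),
        new string[] { "accuracy" });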


test/TensorFlowNET.Keras.UnitTest/SaveModel/SequentialModelSave.cs (+39, -41)

@@ -1,10 +1,8 @@
 using Microsoft.VisualStudio.TestTools.UnitTesting;
 using System.Collections.Generic;
-using System.Diagnostics;
 using Tensorflow;
 using Tensorflow.Keras;
 using Tensorflow.Keras.Engine;
-using Tensorflow.Keras.Losses;
 using Tensorflow.Keras.Optimizers;
 using Tensorflow.Keras.UnitTest.Helpers;
 using static Tensorflow.Binding;
@@ -18,15 +16,15 @@ public class SequentialModelSave
     [TestMethod]
     public void SimpleModelFromAutoCompile()
     {
-        var inputs = keras.layers.Input((28, 28, 1));
-        var x = keras.layers.Flatten().Apply(inputs);
-        x = keras.layers.Dense(100, activation: tf.nn.relu).Apply(x);
-        x = keras.layers.Dense(units: 10).Apply(x);
-        var outputs = keras.layers.Softmax(axis: 1).Apply(x);
-        var model = keras.Model(inputs, outputs);
-        model.compile(new Adam(0.001f),
-            keras.losses.SparseCategoricalCrossentropy(),
+        var inputs = tf.keras.layers.Input((28, 28, 1));
+        var x = tf.keras.layers.Flatten().Apply(inputs);
+        x = tf.keras.layers.Dense(100, activation: tf.nn.relu).Apply(x);
+        x = tf.keras.layers.Dense(units: 10).Apply(x);
+        var outputs = tf.keras.layers.Softmax(axis: 1).Apply(x);
+        var model = tf.keras.Model(inputs, outputs);
+        model.compile(new Adam(0.001f),
+            tf.keras.losses.SparseCategoricalCrossentropy(),
             new string[] { "accuracy" });
 
         var data_loader = new MnistModelLoader();
@@ -48,18 +46,18 @@ public class SequentialModelSave
     [TestMethod]
     public void SimpleModelFromSequential()
     {
-        Model model = KerasApi.keras.Sequential(new List<ILayer>()
+        Model model = keras.Sequential(new List<ILayer>()
         {
-            keras.layers.InputLayer((28, 28, 1)),
-            keras.layers.Flatten(),
-            keras.layers.Dense(100, "relu"),
-            keras.layers.Dense(10),
-            keras.layers.Softmax()
+            tf.keras.layers.InputLayer((28, 28, 1)),
+            tf.keras.layers.Flatten(),
+            tf.keras.layers.Dense(100, "relu"),
+            tf.keras.layers.Dense(10),
+            tf.keras.layers.Softmax()
         });
 
         model.summary();
 
-        model.compile(new Adam(0.001f), new LossesApi().SparseCategoricalCrossentropy(), new string[] { "accuracy" });
+        model.compile(new Adam(0.001f), tf.keras.losses.SparseCategoricalCrossentropy(), new string[] { "accuracy" });
 
         var data_loader = new MnistModelLoader();
         var num_epochs = 1;
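The remainder of this test, which the commit leaves untouched, goes on to train and export the model. A hedged sketch of that flow with stand-in data instead of the MnistModelLoader pipeline (shapes follow the (28, 28, 1) input above; the save path is illustrative, and save_format: "tf" is assumed to be TensorFlow.NET's SavedModel switch):

    // Illustrative stand-in data; the real test feeds MNIST batches.
    var x = np.ones((64, 28, 28, 1), np.float32);
    var y = np.zeros((64, 1), np.int32);

    model.fit(x, y, batch_size: 8, epochs: 1);

    // Export in SavedModel format so SequentialModelLoad can read it back.
    model.save("./simple_model_from_sequential", save_format: "tf");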
@@ -80,39 +78,39 @@ public class SequentialModelSave
     [TestMethod]
     public void AlexnetFromSequential()
     {
-        Model model = KerasApi.keras.Sequential(new List<ILayer>()
+        Model model = keras.Sequential(new List<ILayer>()
         {
-            keras.layers.InputLayer((227, 227, 3)),
-            keras.layers.Conv2D(96, (11, 11), (4, 4), activation:"relu", padding:"valid"),
-            keras.layers.BatchNormalization(),
-            keras.layers.MaxPooling2D((3, 3), strides:(2, 2)),
+            tf.keras.layers.InputLayer((227, 227, 3)),
+            tf.keras.layers.Conv2D(96, (11, 11), (4, 4), activation:"relu", padding:"valid"),
+            tf.keras.layers.BatchNormalization(),
+            tf.keras.layers.MaxPooling2D((3, 3), strides:(2, 2)),
 
-            keras.layers.Conv2D(256, (5, 5), (1, 1), "same", activation: "relu"),
-            keras.layers.BatchNormalization(),
-            keras.layers.MaxPooling2D((3, 3), (2, 2)),
+            tf.keras.layers.Conv2D(256, (5, 5), (1, 1), "same", activation: "relu"),
+            tf.keras.layers.BatchNormalization(),
+            tf.keras.layers.MaxPooling2D((3, 3), (2, 2)),
 
-            keras.layers.Conv2D(384, (3, 3), (1, 1), "same", activation: "relu"),
-            keras.layers.BatchNormalization(),
+            tf.keras.layers.Conv2D(384, (3, 3), (1, 1), "same", activation: "relu"),
+            tf.keras.layers.BatchNormalization(),
 
-            keras.layers.Conv2D(384, (3, 3), (1, 1), "same", activation: "relu"),
-            keras.layers.BatchNormalization(),
+            tf.keras.layers.Conv2D(384, (3, 3), (1, 1), "same", activation: "relu"),
+            tf.keras.layers.BatchNormalization(),
 
-            keras.layers.Conv2D(256, (3, 3), (1, 1), "same", activation: "relu"),
-            keras.layers.BatchNormalization(),
-            keras.layers.MaxPooling2D((3, 3), (2, 2)),
+            tf.keras.layers.Conv2D(256, (3, 3), (1, 1), "same", activation: "relu"),
+            tf.keras.layers.BatchNormalization(),
+            tf.keras.layers.MaxPooling2D((3, 3), (2, 2)),
 
-            keras.layers.Flatten(),
-            keras.layers.Dense(4096, activation: "relu"),
-            keras.layers.Dropout(0.5f),
+            tf.keras.layers.Flatten(),
+            tf.keras.layers.Dense(4096, activation: "relu"),
+            tf.keras.layers.Dropout(0.5f),
 
-            keras.layers.Dense(4096, activation: "relu"),
-            keras.layers.Dropout(0.5f),
+            tf.keras.layers.Dense(4096, activation: "relu"),
+            tf.keras.layers.Dropout(0.5f),
 
-            keras.layers.Dense(1000, activation: "linear"),
-            keras.layers.Softmax(1)
+            tf.keras.layers.Dense(1000, activation: "linear"),
+            tf.keras.layers.Softmax(1)
         });
 
-        model.compile(new Adam(0.001f), new LossesApi().SparseCategoricalCrossentropy(from_logits: true), new string[] { "accuracy" });
+        model.compile(new Adam(0.001f), tf.keras.losses.SparseCategoricalCrossentropy(from_logits: true), new string[] { "accuracy" });
 
         var num_epochs = 1;
         var batch_size = 8;
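SequentialModelLoad.AlexnetFromSequential() calls this test first and then loads ./alexnet_from_sequential, so the two files form a save/load round trip. A minimal sketch of the handoff (the save call itself sits outside the visible hunks; save_format: "tf" is assumed):

    // Save side (end of AlexnetFromSequential, after training):
    model.save("./alexnet_from_sequential", save_format: "tf");

    // Load side (SequentialModelLoad), as changed in this commit:
    var restored = tf.keras.models.load_model(@"./alexnet_from_sequential");
    restored.summary();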

