Browse Source

Extending test to use model and demonstrate memory leak (#858)

* adding saved model cleanup benchmark

* fewer loops so it doesn't waste so much time

* testing with a running model

* actually calling the model

* stopping test from running immediately
tags/TensorFlowOpLayer
LukeBolly GitHub 4 years ago
parent
commit
3feff99550
No known key found for this signature in database GPG Key ID: 4AEE18F83AFDEB23
1 changed files with 10 additions and 3 deletions
  1. +10
    -3
      src/TensorFlowNet.Benchmarks/Leak/SavedModelCleanup.cs

+ 10
- 3
src/TensorFlowNet.Benchmarks/Leak/SavedModelCleanup.cs View File

@@ -6,6 +6,7 @@ using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using Tensorflow.NumPy;
using static Tensorflow.Binding;

namespace Tensorflow.Benchmark.Leak
@@ -23,9 +24,15 @@ namespace Tensorflow.Benchmark.Leak

for (var i = 0; i < 1024; i++)
{
using var sess = Session.LoadFromSavedModel(ClassifierModelPath);
// destroy graph
using var g = sess.graph;
using (var sess = Session.LoadFromSavedModel(ClassifierModelPath)) {
using (var g = sess.graph.as_default()) {
var inputOp = g.OperationByName("inference_input");
var outputOp = g.OperationByName("StatefulPartitionedCall");

var inp = np.zeros(new Shape(new int[] { 1, 2, 96 }), TF_DataType.TF_FLOAT);
sess.run(outputOp.outputs[0], new FeedItem(inputOp.outputs[0], inp));
}
}
}
}
}


Loading…
Cancel
Save