From 3feff99550b49945448e4acd112cc86982716e29 Mon Sep 17 00:00:00 2001
From: LukeBolly
Date: Sat, 9 Oct 2021 20:19:12 +0800
Subject: [PATCH] Extending test to use model and demonstrate memory leak
 (#858)

* adding saved model cleanup benchmark
* less loops so it doesn't waste so much time
* testing with a running model
* actually calling the model
* stopping test from running immediately
---
 .../Leak/SavedModelCleanup.cs | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/src/TensorFlowNet.Benchmarks/Leak/SavedModelCleanup.cs b/src/TensorFlowNet.Benchmarks/Leak/SavedModelCleanup.cs
index c060c5ed..09e20058 100644
--- a/src/TensorFlowNet.Benchmarks/Leak/SavedModelCleanup.cs
+++ b/src/TensorFlowNet.Benchmarks/Leak/SavedModelCleanup.cs
@@ -6,6 +6,7 @@ using System.Linq;
 using System.Reflection;
 using System.Text;
 using System.Threading.Tasks;
+using Tensorflow.NumPy;
 using static Tensorflow.Binding;
 
 namespace Tensorflow.Benchmark.Leak
@@ -23,9 +24,15 @@ namespace Tensorflow.Benchmark.Leak
 
             for (var i = 0; i < 1024; i++)
             {
-                using var sess = Session.LoadFromSavedModel(ClassifierModelPath);
-                // destory graph
-                using var g = sess.graph;
+                using (var sess = Session.LoadFromSavedModel(ClassifierModelPath)) {
+                    using (var g = sess.graph.as_default()) {
+                        var inputOp = g.OperationByName("inference_input");
+                        var outputOp = g.OperationByName("StatefulPartitionedCall");
+
+                        var inp = np.zeros(new Shape(new int[] { 1, 2, 96 }), TF_DataType.TF_FLOAT);
+                        sess.run(outputOp.outputs[0], new FeedItem(inputOp.outputs[0], inp));
+                    }
+                }
             }
         }
     }
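
To observe the leak this patch demonstrates without running the benchmark project, the same load/run loop can be placed in a minimal standalone console program that logs process memory as it goes. The sketch below reuses the TensorFlow.NET calls from the diff (Session.LoadFromSavedModel, OperationByName, FeedItem) and the same operation names; the ModelPath constant and the working-set logging are illustrative assumptions, not code from this PR.

    using System;
    using System.Diagnostics;
    using Tensorflow;
    using Tensorflow.NumPy;
    using static Tensorflow.Binding;

    class SavedModelLeakRepro
    {
        // Hypothetical path; point this at the same classifier SavedModel
        // the benchmark loads via ClassifierModelPath.
        const string ModelPath = @"Leak/TestModel/saved_model";

        static void Main()
        {
            for (var i = 0; i < 1024; i++)
            {
                // Same pattern as the patch: load, run one inference, dispose.
                using (var sess = Session.LoadFromSavedModel(ModelPath))
                {
                    using (var g = sess.graph.as_default())
                    {
                        var inputOp = g.OperationByName("inference_input");
                        var outputOp = g.OperationByName("StatefulPartitionedCall");

                        var inp = np.zeros(new Shape(new int[] { 1, 2, 96 }), TF_DataType.TF_FLOAT);
                        sess.run(outputOp.outputs[0], new FeedItem(inputOp.outputs[0], inp));
                    }
                }

                // Every 128 iterations, force a full GC and print the working
                // set; if it keeps climbing even after collection, native
                // memory from the disposed sessions is not being released.
                if (i % 128 == 0)
                {
                    GC.Collect();
                    GC.WaitForPendingFinalizers();
                    var ws = Process.GetCurrentProcess().WorkingSet64 / (1024 * 1024);
                    Console.WriteLine($"iter {i}: working set ~{ws} MB");
                }
            }
        }
    }

A flat working-set curve across all 1024 iterations would indicate cleanup works; steady growth reproduces the leak described in #858.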