
Add RepeatDataSetCrash test.

tags/v0.60-tf.numpy
Oceania2018 4 years ago
commit 8048b6258a
7 changed files with 47 additions and 7 deletions
  1. +5  -1   src/TensorFlowNET.Core/Data/DatasetV2.cs
  2. +1  -1   src/TensorFlowNET.Keras/Engine/Layer.cs
  3. +3  -3   src/TensorFlowNET.Keras/Engine/Model.cs
  4. +3  -0   src/TensorFlowNET.Keras/Utils/Compress.cs
  5. +28 -0   src/TensorFlowNet.Benchmarks/Crash/RepeatDataSetCrash.cs
  6. +4  -0   src/TensorFlowNet.Benchmarks/Program.cs
  7. +3  -2   src/TensorFlowNet.Benchmarks/Tensorflow.Benchmark.csproj

+5 -1   src/TensorFlowNET.Core/Data/DatasetV2.cs

@@ -25,6 +25,8 @@ namespace Tensorflow
 
         public TensorSpec[] element_spec => structure;
 
+        public int length => cardinality().numpy();
+
         public IDatasetV2 cache(string filename = "")
             => new CacheDataset(this, filename: filename);
 
@@ -136,7 +138,9 @@ namespace Tensorflow
             => tf.Context.ExecuteOp("DatasetCardinality", name, new ExecuteOpArgs(variant_tensor));
 
         public override string ToString()
-            => $"{GetType().Name} shapes: {string.Join(", ", structure.Select(x => x.shape))}, types: {string.Join(", ", structure.Select(x => "tf." + x.dtype.as_numpy_name()))}";
+            => $"{GetType().Name} shapes: {string.Join(", ", structure.Select(x => x.shape))}, " +
+               $"types: {string.Join(", ", structure.Select(x => "tf." + x.dtype.as_numpy_name()))}, " +
+               $"len: {length}";
 
         public IEnumerator<(Tensor, Tensor)> GetEnumerator()
         {
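
The new length property materializes cardinality().numpy() as an int, and ToString() now appends it to the shape/type summary. A minimal usage sketch, assuming tf.data.Dataset.from_tensor_slices as used elsewhere in TensorFlow.NET; the data values are illustrative only and not part of this commit:

    // Sketch only, not part of this commit.
    using NumSharp;
    using static Tensorflow.Binding;

    var ds = tf.data.Dataset.from_tensor_slices(np.arange(10).astype(np.float32));
    print(ds.length);   // cardinality().numpy(): 10 here; negative sentinels mean unknown/infinite
    print(ds);          // ToString() now ends with "len: 10"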


+1 -1   src/TensorFlowNET.Keras/Engine/Layer.cs

@@ -158,7 +158,7 @@ namespace Tensorflow.Keras.Engine
         /// <returns></returns>
         protected virtual Tensors Call(Tensors inputs, Tensor state = null, bool? training = null)
         {
-            throw new NotImplementedException("");
+            return inputs;
         }
 
         protected virtual string _name_scope()
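
With this change the base Call is a pass-through instead of throwing NotImplementedException, so only layers that actually transform their inputs need an override. A sketch of such an override for contrast; the class name, constructor, and tf.nn.relu transformation are illustrative assumptions, not part of this commit:

    // Sketch only, not part of this commit.
    using Tensorflow;
    using Tensorflow.Keras.ArgsDefinition;
    using Tensorflow.Keras.Engine;
    using static Tensorflow.Binding;

    class ReluLayer : Layer
    {
        public ReluLayer(LayerArgs args) : base(args) { }

        protected override Tensors Call(Tensors inputs, Tensor state = null, bool? training = null)
            => tf.nn.relu(inputs);   // the base implementation would now simply return inputs
    }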


+3 -3   src/TensorFlowNET.Keras/Engine/Model.cs

@@ -57,15 +57,15 @@ namespace Tensorflow.Keras.Engine
 
         void _init_batch_counters()
         {
-            _train_counter = tf.Variable(0,
+            _train_counter = tf.Variable(0L,
                 dtype: TF_DataType.TF_INT64,
                 aggregation: VariableAggregation.OnlyFirstReplica);
 
-            _test_counter = tf.Variable(0,
+            _test_counter = tf.Variable(0L,
                 dtype: TF_DataType.TF_INT64,
                 aggregation: VariableAggregation.OnlyFirstReplica);
 
-            _predict_counter = tf.Variable(0,
+            _predict_counter = tf.Variable(0L,
                 dtype: TF_DataType.TF_INT64,
                 aggregation: VariableAggregation.OnlyFirstReplica);
         }
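
The three batch counters are declared with dtype TF_INT64, so the seed value is now written as the long literal 0L rather than the int 0, keeping the CLR type of the initial value in line with the requested TensorFlow dtype. The pattern in isolation, as a sketch; the assign_add call is an assumed way to advance such a counter:

    // Sketch only, not part of this commit.
    var step = tf.Variable(0L,
        dtype: TF_DataType.TF_INT64,
        aggregation: VariableAggregation.OnlyFirstReplica);
    step.assign_add(1L);   // assumed increment API for step counters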


+3 -0   src/TensorFlowNET.Keras/Utils/Compress.cs

@@ -53,6 +53,9 @@ namespace Tensorflow.Keras.Utils
             var flag = gzArchiveName.Split(Path.DirectorySeparatorChar).Last().Split('.').First() + ".bin";
             if (File.Exists(Path.Combine(destFolder, flag))) return;
 
+            var destFileName = gzArchiveName.Replace(".zip", string.Empty);
+            if (File.Exists(destFileName)) return;
+
             Binding.tf_output_redirect.WriteLine($"Extracting.");
             var task = Task.Run(() =>
             {


+28 -0  src/TensorFlowNet.Benchmarks/Crash/RepeatDataSetCrash.cs

@@ -0,0 +1,28 @@
+using BenchmarkDotNet.Attributes;
+using System;
+using System.Collections.Generic;
+using NumSharp;
+using static Tensorflow.Binding;
+using static Tensorflow.KerasApi;
+
+namespace Tensorflow.Benchmark.Crash
+{
+    public class RepeatDataSetCrash
+    {
+        [Benchmark]
+        public void Run()
+        {
+            var data = tf.convert_to_tensor(np.arange(0, 50000 * 10).astype(np.float32).reshape(50000, 10));
+
+            var dataset = keras.preprocessing.timeseries_dataset_from_array(data,
+                sequence_length: 10,
+                sequence_stride: 1,
+                shuffle: false,
+                batch_size: 32);
+
+            while (true)
+                foreach (var d in dataset)
+                    ;
+        }
+    }
+}
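
The benchmark reproduces the crash by building a timeseries dataset over a 50000x10 float array and enumerating it in an endless loop; the while (true) means it is meant to run until the crash appears, not to finish as a normal benchmark. Besides the direct call added to Program.Main below, it could be driven through the BenchmarkDotNet runner; a minimal sketch, with the entry-point class name being an illustrative assumption:

    // Sketch only, not part of this commit.
    using BenchmarkDotNet.Running;
    using Tensorflow.Benchmark.Crash;

    class CrashEntryPoint
    {
        static void Main() => BenchmarkRunner.Run<RepeatDataSetCrash>();
    }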

+4 -0   src/TensorFlowNet.Benchmarks/Program.cs

@@ -2,7 +2,9 @@
 using BenchmarkDotNet.Running;
 using System;
 using System.Reflection;
+using Tensorflow.Benchmark.Crash;
 using Tensorflow.Benchmark.Leak;
+using static Tensorflow.Binding;
 
 namespace TensorFlowBenchmark
 {
@@ -10,6 +12,8 @@ namespace TensorFlowBenchmark
     {
         static void Main(string[] args)
         {
+            print(tf.VERSION);
+            new RepeatDataSetCrash().Run();
             new GpuLeakByCNN().Run();
 
             if (args?.Length > 0)


+3 -2   src/TensorFlowNet.Benchmarks/Tensorflow.Benchmark.csproj

@@ -14,6 +14,7 @@
 
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
     <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
   </PropertyGroup>
 
   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
@@ -35,8 +36,8 @@
   </ItemGroup>
 
   <ItemGroup>
-    <PackageReference Include="BenchmarkDotNet" Version="0.12.1" />
-    <PackageReference Include="SciSharp.TensorFlow.Redist-Windows-GPU" Version="2.5.0" />
+    <PackageReference Include="BenchmarkDotNet" Version="0.13.0" />
+    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.5.0" />
   </ItemGroup>
 
   <ItemGroup>

