
Separated Keras from binding project.

tags/v0.30
Oceania2018 4 years ago
parent
commit 41df785232
100 changed files with 327 additions and 1710 deletions
1. +26 -0  TensorFlow.NET.sln
2. +0 -1  src/TensorFlowNET.Console/Program.cs
3. +1 -1  src/TensorFlowNET.Console/TensorFlowNET.Console.csproj
4. +0 -2  src/TensorFlowNET.Core/APIs/tf.nn.cs
5. +0 -42  src/TensorFlowNET.Core/APIs/tf.optimizers.cs
6. +2 -3  src/TensorFlowNET.Core/APIs/tf.train.cs
7. +1 -1  src/TensorFlowNET.Core/Keras/ArgsDefinition/DataHandlerArgs.cs
8. +1 -1  src/TensorFlowNET.Core/Keras/ArgsDefinition/NodeArgs.cs
9. +1 -1  src/TensorFlowNET.Core/Keras/ArgsDefinition/SequentialArgs.cs
10. +1 -1  src/TensorFlowNET.Core/Keras/ArgsDefinition/TensorLikeDataAdapterArgs.cs
11. +0 -74  src/TensorFlowNET.Core/Keras/Datasets/Mnist.cs
12. +0 -10  src/TensorFlowNET.Core/Keras/Engine/CallContext.cs
13. +1 -1  src/TensorFlowNET.Core/Keras/Engine/IModel.cs
14. +16 -0  src/TensorFlowNET.Core/Keras/Engine/INode.cs
15. +4 -6  src/TensorFlowNET.Core/Keras/Engine/KerasHistory.cs
16. +0 -121  src/TensorFlowNET.Core/Keras/Engine/Node.cs
17. +0 -134  src/TensorFlowNET.Core/Keras/Engine/Sequential.cs
18. +20 -0  src/TensorFlowNET.Core/Keras/Layers/ILayer.cs
19. +0 -41  src/TensorFlowNET.Core/Keras/Losses/Loss.cs
20. +0 -12  src/TensorFlowNET.Core/Keras/Losses/LossFunctionWrapper.cs
21. +0 -33  src/TensorFlowNET.Core/Keras/Losses/SparseCategoricalCrossentropy.cs
22. +0 -14  src/TensorFlowNET.Core/Keras/Metrics/Mean.cs
23. +0 -27  src/TensorFlowNET.Core/Keras/Metrics/MeanMetricWrapper.cs
24. +0 -62  src/TensorFlowNET.Core/Keras/Metrics/Metric.cs
25. +0 -74  src/TensorFlowNET.Core/Keras/Metrics/Reduce.cs
26. +0 -6  src/TensorFlowNET.Core/Keras/Metrics/Sum.cs
27. +0 -210  src/TensorFlowNET.Core/Layers/Layer.cs
28. +1 -1  src/TensorFlowNET.Core/Operations/NnOps/BasicLSTMCell.cs
29. +1 -1  src/TensorFlowNET.Core/Operations/NnOps/BasicRNNCell.cs
30. +1 -1  src/TensorFlowNET.Core/Operations/NnOps/ConvolutionInternal.cs
31. +151 -3  src/TensorFlowNET.Core/Operations/NnOps/LayerRNNCell.cs
32. +28 -5  src/TensorFlowNET.Core/Operations/NnOps/RNNCell.cs
33. +2 -2  src/TensorFlowNET.Core/Operations/NnOps/rnn.cs
34. +1 -1  src/TensorFlowNET.Core/Operations/nn_ops.cs
35. +0 -4  src/TensorFlowNET.Core/Tensorflow.Binding.csproj
36. +0 -10  src/TensorFlowNET.Keras/Activations.cs
37. +0 -0  src/TensorFlowNET.Keras/Activations/Activations.Linear.cs
38. +0 -0  src/TensorFlowNET.Keras/Activations/Activations.Relu.cs
39. +0 -0  src/TensorFlowNET.Keras/Activations/Activations.Sigmoid.cs
40. +0 -0  src/TensorFlowNET.Keras/Activations/Activations.Tanh.cs
41. +0 -35  src/TensorFlowNET.Keras/Applications/Densenet.cs
42. +0 -60  src/TensorFlowNET.Keras/Applications/Efficientnet.cs
43. +0 -22  src/TensorFlowNET.Keras/Applications/ImagenetUtils.cs
44. +0 -22  src/TensorFlowNET.Keras/Applications/InceptionResnetV2.cs
45. +0 -19  src/TensorFlowNET.Keras/Applications/InceptionV3.cs
46. +0 -18  src/TensorFlowNET.Keras/Applications/Mobilenet.cs
47. +0 -21  src/TensorFlowNET.Keras/Applications/MobilenetV2.cs
48. +0 -31  src/TensorFlowNET.Keras/Applications/Nasnet.cs
49. +0 -41  src/TensorFlowNET.Keras/Applications/Resnet.cs
50. +0 -25  src/TensorFlowNET.Keras/Applications/ResnetV2.cs
51. +0 -17  src/TensorFlowNET.Keras/Applications/Vgg16.cs
52. +0 -17  src/TensorFlowNET.Keras/Applications/Vgg19.cs
53. +0 -17  src/TensorFlowNET.Keras/Applications/Xception.cs
54. +0 -29  src/TensorFlowNET.Keras/Args.cs
55. +0 -10  src/TensorFlowNET.Keras/Backend.cs
56. +0 -0  src/TensorFlowNET.Keras/BackendBase.cs
57. +0 -10  src/TensorFlowNET.Keras/BackendConfig.cs
58. +0 -0  src/TensorFlowNET.Keras/BackendImpl.cs
59. +0 -10  src/TensorFlowNET.Keras/Callbacks/BaseLogger.cs
60. +0 -10  src/TensorFlowNET.Keras/Callbacks/CSVLogger.cs
61. +0 -10  src/TensorFlowNET.Keras/Callbacks/Callback.cs
62. +0 -10  src/TensorFlowNET.Keras/Callbacks/CallbackList.cs
63. +0 -10  src/TensorFlowNET.Keras/Callbacks/EarlyStopping.cs
64. +0 -10  src/TensorFlowNET.Keras/Callbacks/History.cs
65. +0 -10  src/TensorFlowNET.Keras/Callbacks/LambdaCallback.cs
66. +0 -10  src/TensorFlowNET.Keras/Callbacks/LearningRateScheduler.cs
67. +0 -10  src/TensorFlowNET.Keras/Callbacks/ModelCheckpoint.cs
68. +0 -10  src/TensorFlowNET.Keras/Callbacks/ProgbarLogger.cs
69. +0 -10  src/TensorFlowNET.Keras/Callbacks/ReduceLROnPlateau.cs
70. +0 -10  src/TensorFlowNET.Keras/Callbacks/RemoteMonitor.cs
71. +0 -10  src/TensorFlowNET.Keras/Callbacks/TensorBoard.cs
72. +0 -10  src/TensorFlowNET.Keras/Callbacks/TensorBoardV1.cs
73. +0 -10  src/TensorFlowNET.Keras/Callbacks/TerminateOnNaN.cs
74. +0 -10  src/TensorFlowNET.Keras/Constraints/ConstraintBase.cs
75. +0 -10  src/TensorFlowNET.Keras/Constraints/MaxNorm.cs
76. +0 -10  src/TensorFlowNET.Keras/Constraints/MinMaxNorm.cs
77. +0 -10  src/TensorFlowNET.Keras/Constraints/NonNeg.cs
78. +0 -10  src/TensorFlowNET.Keras/Constraints/RadialConstraint.cs
79. +0 -10  src/TensorFlowNET.Keras/Constraints/UnitNorm.cs
80. +0 -13  src/TensorFlowNET.Keras/Core.cs
81. +0 -11  src/TensorFlowNET.Keras/Datasets/BostonHousing.cs
82. +0 -11  src/TensorFlowNET.Keras/Datasets/Cifar.cs
83. +0 -11  src/TensorFlowNET.Keras/Datasets/Cifar10.cs
84. +0 -11  src/TensorFlowNET.Keras/Datasets/Cifar100.cs
85. +0 -0  src/TensorFlowNET.Keras/Datasets/DatasetPass.cs
86. +0 -11  src/TensorFlowNET.Keras/Datasets/FashionMNIST.cs
87. +0 -15  src/TensorFlowNET.Keras/Datasets/IMDB.cs
88. +0 -0  src/TensorFlowNET.Keras/Datasets/KerasDataset.cs
89. +68 -5  src/TensorFlowNET.Keras/Datasets/MNIST.cs
90. +0 -12  src/TensorFlowNET.Keras/Datasets/Reuters.cs
91. +0 -10  src/TensorFlowNET.Keras/Distribute/DistributedTrainingUtils.cs
92. +0 -10  src/TensorFlowNET.Keras/Distribute/KerasCorrectnessTestBase.cs
93. +0 -10  src/TensorFlowNET.Keras/Distribute/KerasDnnCorrectnessTest.cs
94. +0 -10  src/TensorFlowNET.Keras/Distribute/KerasEmbeddingModelCorrectnessTest.cs
95. +0 -10  src/TensorFlowNET.Keras/Distribute/KerasImageModelCorrectnessTest.cs
96. +0 -10  src/TensorFlowNET.Keras/Distribute/KerasOptimizerV2Test.cs
97. +0 -10  src/TensorFlowNET.Keras/Distribute/KerasPremadeModelsTest.cs
98. +0 -10  src/TensorFlowNET.Keras/Distribute/KerasRnnModelCorrectnessTest.cs
99. +0 -10  src/TensorFlowNET.Keras/Distribute/KerasStatefulLstmModelCorrectnessTest.cs
100. +0 -10  src/TensorFlowNET.Keras/Distribute/KerasUtilsTest.cs

+ 26  - 0  TensorFlow.NET.sln

@@ -11,6 +11,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tensorflow.UnitTest", "test
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TensorFlowNET.Console", "src\TensorFlowNET.Console\TensorFlowNET.Console.csproj", "{03F06299-3F4B-4449-A709-3A647657BC0C}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tensorflow.Keras", "src\TensorFlowNET.Keras\Tensorflow.Keras.csproj", "{49D71826-C03D-4FA7-9BAC-22C1327E65CF}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -123,6 +125,30 @@ Global
{03F06299-3F4B-4449-A709-3A647657BC0C}.Release|x64.Build.0 = Release|Any CPU
{03F06299-3F4B-4449-A709-3A647657BC0C}.Release|x86.ActiveCfg = Release|Any CPU
{03F06299-3F4B-4449-A709-3A647657BC0C}.Release|x86.Build.0 = Release|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug|Any CPU.Build.0 = Debug|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug|x64.ActiveCfg = Debug|x64
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug|x64.Build.0 = Debug|x64
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug|x86.ActiveCfg = Debug|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug|x86.Build.0 = Debug|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug-Minimal|Any CPU.ActiveCfg = Debug|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug-Minimal|Any CPU.Build.0 = Debug|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug-Minimal|x64.ActiveCfg = Debug|x64
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug-Minimal|x64.Build.0 = Debug|x64
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug-Minimal|x86.ActiveCfg = Debug|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug-Minimal|x86.Build.0 = Debug|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Publish|Any CPU.ActiveCfg = Release|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Publish|Any CPU.Build.0 = Release|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Publish|x64.ActiveCfg = Debug|x64
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Publish|x64.Build.0 = Debug|x64
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Publish|x86.ActiveCfg = Release|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Publish|x86.Build.0 = Release|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Release|Any CPU.ActiveCfg = Release|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Release|Any CPU.Build.0 = Release|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Release|x64.ActiveCfg = Release|x64
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Release|x64.Build.0 = Release|x64
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Release|x86.ActiveCfg = Release|Any CPU
{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Release|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE


+ 0  - 1  src/TensorFlowNET.Console/Program.cs

@@ -1,5 +1,4 @@
using System;
using static Tensorflow.Binding;

namespace Tensorflow
{


+ 1  - 1  src/TensorFlowNET.Console/TensorFlowNET.Console.csproj

@@ -12,7 +12,7 @@
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\TensorFlowNET.Core\Tensorflow.Binding.csproj" />
<ProjectReference Include="..\TensorFlowNET.Keras\Tensorflow.Keras.csproj" />
</ItemGroup>

</Project>

+ 0  - 2  src/TensorFlowNET.Core/APIs/tf.nn.cs

@@ -137,8 +137,6 @@ namespace Tensorflow
is_training: is_training,
name: name);

public IPoolFunction max_pool_fn => new MaxPoolFunction();

public Tensor max_pool(Tensor value, int[] ksize, int[] strides, string padding, string data_format = "NHWC", string name = null)
=> nn_ops.max_pool(value, ksize, strides, padding, data_format: data_format, name: name);



+ 0  - 42  src/TensorFlowNET.Core/APIs/tf.optimizers.cs

@@ -1,42 +0,0 @@
/*****************************************************************************
Copyright 2018 The TensorFlow.NET Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
******************************************************************************/

using Tensorflow.Keras.Optimizers;

namespace Tensorflow
{
public partial class tensorflow
{
public KerasOptimizers optimizers => new KerasOptimizers();

public class KerasOptimizers
{
public SGD SGD(float learning_rate) => new SGD(learning_rate);

public Adam Adam(float learning_rate = 0.001f,
float beta_1 = 0.9f,
float beta_2 = 0.999f,
float epsilon = 1e-7f,
bool amsgrad = false,
string name = "Adam") => new Adam(learning_rate: learning_rate,
beta_1: beta_1,
beta_2: beta_2,
epsilon: epsilon,
amsgrad: amsgrad,
name: name);
}
}
}
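
Since tf.optimizers is gone from the binding, optimizers are now constructed from the Keras side. A minimal sketch, assuming the SGD and Adam constructors keep the signatures shown in the deleted wrapper above (the exact post-split namespace is an assumption):

using Tensorflow.Keras.Optimizers;   // assumed home of SGD/Adam after the split

var sgd  = new SGD(learning_rate: 0.01f);
var adam = new Adam(learning_rate: 0.001f,
                    beta_1: 0.9f,
                    beta_2: 0.999f,
                    epsilon: 1e-7f,
                    amsgrad: false,
                    name: "Adam");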

+ 2  - 3  src/TensorFlowNET.Core/APIs/tf.train.cs

@@ -15,7 +15,6 @@
******************************************************************************/

using System.Collections.Generic;
using Tensorflow.Keras.Optimizers;
using Tensorflow.Train;

namespace Tensorflow
@@ -87,7 +86,7 @@ namespace Tensorflow
public CheckpointState get_checkpoint_state(string checkpoint_dir, string latest_filename = null)
=> checkpoint_management.get_checkpoint_state(checkpoint_dir, latest_filename: latest_filename);

public Tensor polynomial_decay(float learning_rate,
/*public Tensor polynomial_decay(float learning_rate,
RefVariable global_step,
float decay_steps,
float end_learning_rate = 0.0001f,
@@ -105,7 +104,7 @@ namespace Tensorflow
var decayed_lr = decayed.__call__(global_step);

return decayed_lr;
}
}*/
}
}
}

+ 1  - 1  src/TensorFlowNET.Core/Keras/ArgsDefinition/DataHandlerArgs.cs

@@ -14,7 +14,7 @@ namespace Tensorflow.Keras.ArgsDefinition
public int MaxQueueSize { get; set; } = 10;
public int Workers { get; set; } = 1;
public bool UseMultiprocessing { get; set; } = false;
public Model Model { get; set; }
public IModel Model { get; set; }
public IVariableV1 StepsPerExecution { get; set; }
}
}

+ 1  - 1  src/TensorFlowNET.Core/Keras/ArgsDefinition/NodeArgs.cs

@@ -4,7 +4,7 @@ namespace Tensorflow.Keras.ArgsDefinition
{
public class NodeArgs
{
public Layer[] InboundLayers { get; set; }
public ILayer[] InboundLayers { get; set; }
public int[] NodeIndices { get; set; }
public int[] TensorIndices { get; set; }
public Tensors InputTensors { get; set; }


+ 1  - 1  src/TensorFlowNET.Core/Keras/ArgsDefinition/SequentialArgs.cs

@@ -5,6 +5,6 @@ namespace Tensorflow.Keras.ArgsDefinition
{
public class SequentialArgs : ModelArgs
{
public List<Layer> Layers { get; set; }
public List<ILayer> Layers { get; set; }
}
}

+ 1  - 1  src/TensorFlowNET.Core/Keras/ArgsDefinition/TensorLikeDataAdapterArgs.cs

@@ -13,6 +13,6 @@ namespace Tensorflow.Keras.ArgsDefinition
public int MaxQueueSize { get; set; }
public int Worker { get; set; }
public bool UseMultiprocessing { get; set; }
public Model Model { get; set; }
public IModel Model { get; set; }
}
}

+ 0  - 74  src/TensorFlowNET.Core/Keras/Datasets/Mnist.cs

@@ -1,74 +0,0 @@
/*****************************************************************************
Copyright 2020 Haiping Chen. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
******************************************************************************/

using NumSharp;
using System;
using System.IO;
using System.Net;

namespace Tensorflow.Keras.Datasets
{
public class Mnist
{
string origin_folder = "https://storage.googleapis.com/tensorflow/tf-keras-datasets/";
string file_name = "mnist.npz";

/// <summary>
/// Loads the [MNIST dataset](http://yann.lecun.com/exdb/mnist/).
/// </summary>
/// <returns></returns>
public DatasetPass load_data()
{
var file = Download();
var bytes = File.ReadAllBytes(file);
var datax = LoadX(bytes);
var datay = LoadY(bytes);
return new DatasetPass
{
Train = (datax.Item1, datay.Item1),
Test = (datax.Item2, datay.Item2)
};
}

(NDArray, NDArray) LoadX(byte[] bytes)
{
var y = np.Load_Npz<byte[,,]>(bytes);
return (y["x_train.npy"], y["x_test.npy"]);
}

(NDArray, NDArray) LoadY(byte[] bytes)
{
var y = np.Load_Npz<byte[]>(bytes);
return (y["y_train.npy"], y["y_test.npy"]);
}

string Download()
{
var fileSaveTo = Path.Combine(Path.GetTempPath(), file_name);

if (File.Exists(fileSaveTo))
{
Console.WriteLine($"The file {fileSaveTo} already exists");
return fileSaveTo;
}

using var wc = new WebClient();
wc.DownloadFileTaskAsync(origin_folder + file_name, fileSaveTo).Wait();

return fileSaveTo;
}
}
}
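
The loader deleted here reappears as src/TensorFlowNET.Keras/Datasets/MNIST.cs (+68 below). A usage sketch based on the deleted API; that the relocated class keeps the same load_data()/DatasetPass shape is an assumption:

var mnist = new Mnist();                  // now expected to live in the Tensorflow.Keras project
var data  = mnist.load_data();            // downloads mnist.npz into the temp folder on first use
var (xTrain, yTrain) = data.Train;        // NDArray pairs, as in DatasetPass above
var (xTest,  yTest)  = data.Test;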

+ 0  - 10  src/TensorFlowNET.Core/Keras/Engine/CallContext.cs

@@ -1,10 +0,0 @@
namespace Tensorflow.Keras.Engine
{
public class CallContext
{
public CallContextManager enter()
{
return new CallContextManager();
}
}
}

src/TensorFlowNET.Keras/Engine/InputSpec.cs → src/TensorFlowNET.Core/Keras/Engine/IModel.cs

@@ -4,7 +4,7 @@ using System.Text;

namespace Tensorflow.Keras.Engine
{
class InputSpec
public interface IModel
{
}
}

+ 16  - 0  src/TensorFlowNET.Core/Keras/Engine/INode.cs

@@ -0,0 +1,16 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Engine
{
public interface INode
{
Tensors input_tensors { get; }
Tensors Outputs { get; }
ILayer Layer { get; set; }
List<Tensor> KerasInputs { get; set; }
INode[] ParentNodes { get; }
IEnumerable<(ILayer, int, int, Tensor)> iterate_inbound();
}
}
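
A sketch of consuming the new INode surface to walk a layer graph; PrintInbound is a hypothetical helper and node is assumed to be an existing INode instance:

using System;
using Tensorflow.Keras;
using Tensorflow.Keras.Engine;

static void PrintInbound(INode node)
{
    // iterate_inbound yields (layer, node index, tensor index, tensor) for each inbound edge.
    foreach (var (layer, nodeIndex, tensorIndex, tensor) in node.iterate_inbound())
        Console.WriteLine($"{layer.Name} (node {nodeIndex}, tensor {tensorIndex}) -> {tensor.name}");

    // ParentNodes resolves KerasInputs back to the nodes that produced them.
    foreach (var parent in node.ParentNodes)
        PrintInbound(parent);
}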

+ 4  - 6  src/TensorFlowNET.Core/Keras/Engine/KerasHistory.cs

@@ -5,12 +5,13 @@
/// </summary>
public class KerasHistory
{
Layer layer;
ILayer layer;
public ILayer Layer => layer;
int node_index;
int tensor_index;
Tensor tensor;

public KerasHistory(Layer layer, int node_index, int tensor_index, Tensor tensor)
public KerasHistory(ILayer layer, int node_index, int tensor_index, Tensor tensor)
{
this.layer = layer;
this.node_index = node_index;
@@ -18,7 +19,7 @@
this.tensor = tensor;
}

public void Deconstruct(out Layer layer, out int node_index, out int tensor_index)
public void Deconstruct(out ILayer layer, out int node_index, out int tensor_index)
{
layer = this.layer;
node_index = this.node_index;
@@ -27,8 +28,5 @@

public override string ToString()
=> $"{layer.GetType().Name} {layer.Name} {tensor.name}";

public static implicit operator Layer(KerasHistory history)
=> history.layer;
}
}
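
With the implicit Layer conversion removed, call sites deconstruct KerasHistory into the ILayer abstraction instead. A sketch, assuming tensor is an existing Keras tensor that carries a KerasHistory:

var (layer, nodeIndex, tensorIndex) = tensor.KerasHistory;  // layer is an ILayer now
var producingNode = layer.InboundNodes[nodeIndex];          // the node that produced the tensor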

+ 0  - 121  src/TensorFlowNET.Core/Keras/Engine/Node.cs

@@ -1,121 +0,0 @@
/*****************************************************************************
Copyright 2018 The TensorFlow.NET Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
******************************************************************************/

using System.Collections.Generic;
using System.Linq;
using Tensorflow.Keras.ArgsDefinition;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Engine
{
/// <summary>
/// A `Node` describes the connectivity between two layers.
///
/// Each time a layer is connected to some new input,
/// a node is added to `layer._inbound_nodes`.
/// Each time the output of a layer is used by another layer,
/// a node is added to `layer._outbound_nodes`.
/// </summary>
public partial class Node
{
NodeArgs args;

public int[] node_indices;
public int[] tensor_indices;
public Tensors input_tensors => args.InputTensors;
public Tensors Outputs => args.Outputs;
public TensorShape[] input_shapes;
public TensorShape[] output_shapes;
public List<Tensor> KerasInputs = new List<Tensor>();
public Layer Layer { get; set; }
public bool IsInput => args.InputTensors == null;
public int[] FlatInputIds { get; set; }
public int[] FlatOutputIds { get; set; }
bool _single_positional_tensor_passed => KerasInputs.Count() == 1;
Dictionary<int, int> _keras_inputs_ids_and_indices = new Dictionary<int, int>();
public Node[] ParentNodes
{
get
{
var node_deps = new List<Node>();
foreach (var kt in KerasInputs)
{
var (layer, node_index, _) = kt.KerasHistory;
if (layer != null)
node_deps.append(layer.InboundNodes[node_index]);
}
return node_deps.ToArray();
}
}

public Node(Layer layer, NodeArgs args)
{
this.args = args;
this.Layer = layer;

if (args.InputTensors != null)
KerasInputs.AddRange(args.InputTensors);

foreach (var (i, ele) in enumerate(KerasInputs))
_keras_inputs_ids_and_indices[i] = ele.GetHashCode();

// Wire up Node to Layers.
layer.InboundNodes.Add(this);
foreach (var kt in KerasInputs)
{
if (kt.KerasHistory == null)
continue;
var (inbound_layer, _, _) = kt.KerasHistory;
if (inbound_layer != null)
inbound_layer.OutboundNodes.Add(this);
}

// Set metadata on outputs.
var node_index = layer.InboundNodes.Count - 1;
foreach (var (i, tensor) in enumerate(Outputs))
tensor.KerasHistory = new KerasHistory(layer, node_index, i, tensor);

// Cached for performance.
FlatInputIds = KerasInputs.Select(x => x.GetHashCode()).ToArray();
FlatOutputIds = Outputs.Select(x => x.GetHashCode()).ToArray();
}

/// <summary>
/// Maps Keras Tensors to computed Tensors using `tensor_dict`.
/// </summary>
/// <param name="tensor_dict"></param>
/// <returns></returns>
public Tensors MapArguments(Dictionary<int, Queue<Tensor>> tensor_dict)
{
if (_single_positional_tensor_passed)
{
var kt_id = _keras_inputs_ids_and_indices[0];
return tensor_dict[kt_id].Dequeue();
}
else
{
var flat_arguments = KerasInputs.Select(x => x).ToArray();
foreach (var (kt_index, kt_id) in enumerate(_keras_inputs_ids_and_indices))
flat_arguments[kt_index] = tensor_dict[kt_id].Dequeue();

return flat_arguments;
}
}

public override string ToString()
=> $"{Layer.Name}, {KerasInputs.Count} inputs: {string.Join(",", KerasInputs.Select(x => x.name))}";
}
}

+ 0  - 134  src/TensorFlowNET.Core/Keras/Engine/Sequential.cs

@@ -1,134 +0,0 @@
/*****************************************************************************
Copyright 2018 The TensorFlow.NET Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
******************************************************************************/

using System.Collections.Generic;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Layers;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Engine
{
/// <summary>
/// `Sequential` groups a linear stack of layers into a `tf.keras.Model`.
/// `Sequential` provides training and inference features on this model.
/// </summary>
public class Sequential : Model
{
SequentialArgs args;
bool _is_graph_network;
Tensor inputs;
Tensor outputs;

bool computeOutputAndMaskJointly;
bool autoTrackSubLayers;
TensorShape inferredInputShape;
bool hasExplicitInputShape;
TF_DataType inputDType;
List<Layer> layers => args.Layers;
public TensorShape output_shape => outputs.TensorShape;
bool built = false;

public Sequential(SequentialArgs args)
: base(new ModelArgs
{
Name = args.Name
})
{
this.args = args;
if (args.Layers == null)
args.Layers = new List<Layer>();
// SupportsMasking = true;
computeOutputAndMaskJointly = true;
autoTrackSubLayers = false;
hasExplicitInputShape = false;
_is_graph_network = false;
}

public void add(Tensor tensor)
{
Layer layer = tensor.KerasHistory;
add(layer);
}

/// <summary>
/// Adds a layer instance on top of the layer stack.
/// </summary>
/// <param name="layer"></param>
public void add(Layer layer)
{
built = false;
var set_inputs = false;
if (layers.Count == 0)
{
if (layer is InputLayer)
{
set_inputs = true;
}
else
{
if (layer.BatchInputShape != null)
{
// Instantiate an input layer.
var x = tf.keras.Input(
shape: layer.BatchInputShape,
dtype: layer.DType,
name: layer.Name + "_input");

// This will build the current layer
// and create the node connecting the current layer
// to the input layer we just created.
layer.Apply(x);
set_inputs = true;
}
}

if (set_inputs)
{
// If an input layer (placeholder) is available.
outputs = layer.InboundNodes[^1].Outputs;
}

}
else if (outputs != null)
{
outputs = layer.Apply(outputs);
}

if (set_inputs || _is_graph_network)
{
_init_graph_network(inputs, outputs);
}
else
{

}
}

void _init_graph_network(Tensor inputs, Tensor outputs)
{
_is_graph_network = true;
this.inputs = inputs;
this.outputs = outputs;
built = true;
_map_graph_network(inputs, outputs);
}

void _map_graph_network(Tensor inputs, Tensor outputs)
{
layers.add(outputs.KerasHistory);
}
}
}
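
The behaviour documented above (the first add() wires up an input node, later adds chain layer.Apply) is unchanged by the move; only the owning project is. A usage sketch, assuming the relocated class keeps the add() API and that denseLayer and outputLayer are existing layers:

var model = new Sequential(new SequentialArgs
{
    Layers = new List<ILayer>()          // SequentialArgs now holds ILayer, per the change above
});
model.add(denseLayer);                   // first layer: an InputLayer is created from BatchInputShape
model.add(outputLayer);                  // later layers: outputs = layer.Apply(outputs)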

+ 20  - 0  src/TensorFlowNET.Core/Keras/Layers/ILayer.cs

@@ -0,0 +1,20 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.Engine;

namespace Tensorflow.Keras
{
public interface ILayer
{
string Name { get; }
bool Trainable { get; }
List<ILayer> Layers { get; }
List<INode> InboundNodes { get; }
List<INode> OutboundNodes { get; }
Tensors Apply(Tensors inputs, Tensor state = null, bool is_training = false);
List<IVariableV1> trainable_variables { get; }
TensorShape output_shape { get; }
int count_params();
}
}
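
The new ILayer surface is enough for model-agnostic utilities. A sketch of a hypothetical summary helper that relies only on the members declared above:

using System;
using Tensorflow.Keras;

static void Summary(ILayer layer, int indent = 0)
{
    Console.WriteLine($"{new string(' ', indent)}{layer.Name}  " +
                      $"params={layer.count_params()}  trainable={layer.Trainable}");
    foreach (var child in layer.Layers)   // nested layers, e.g. a model's stack
        Summary(child, indent + 2);
}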

+ 0  - 41  src/TensorFlowNET.Core/Keras/Losses/Loss.cs

@@ -1,41 +0,0 @@
using System;
using Tensorflow.Keras.Utils;

namespace Tensorflow.Keras.Losses
{
/// <summary>
/// Loss base class.
/// </summary>
public abstract class Loss
{
protected string reduction;
protected string name;
bool _allow_sum_over_batch_size;
string _name_scope;

public string Reduction => reduction;

public Loss(string reduction = ReductionV2.AUTO, string name = null)
{
this.reduction = reduction;
this.name = name;
_allow_sum_over_batch_size = false;
}

public virtual Tensor Apply(Tensor y_true, Tensor y_pred, bool from_logits = false, int axis = -1)
{
throw new NotImplementedException("");
}

public Tensor Call(Tensor y_true, Tensor y_pred)
{
var losses = Apply(y_true, y_pred);
return losses_utils.compute_weighted_loss(losses, reduction: ReductionV2.SUM_OVER_BATCH_SIZE);
}

void _set_name_scope()
{
_name_scope = name;
}
}
}

+ 0  - 12  src/TensorFlowNET.Core/Keras/Losses/LossFunctionWrapper.cs

@@ -1,12 +0,0 @@
namespace Tensorflow.Keras.Losses
{
public class LossFunctionWrapper : Loss
{
public LossFunctionWrapper(string reduction = ReductionV2.AUTO,
string name = null)
: base(reduction: reduction,
name: name)
{
}
}
}

+ 0  - 33  src/TensorFlowNET.Core/Keras/Losses/SparseCategoricalCrossentropy.cs

@@ -1,33 +0,0 @@
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Losses
{
public class SparseCategoricalCrossentropy : LossFunctionWrapper, ILossFunc
{
public SparseCategoricalCrossentropy(bool from_logits = false,
string reduction = ReductionV2.AUTO,
string name = "sparse_categorical_crossentropy") :
base(reduction: reduction,
name: name)
{

}

public override Tensor Apply(Tensor target, Tensor output, bool from_logits = false, int axis = -1)
{
target = tf.cast(target, dtype: TF_DataType.TF_INT64);

// Try to adjust the shape so that rank of labels = rank of logits - 1.
var output_shape = array_ops.shape_v2(output);
var output_rank = output.TensorShape.ndim;
var target_rank = target.TensorShape.ndim;
var update_shape = target_rank != output_rank - 1;
if (update_shape)
{
target = array_ops.reshape(target, new int[] { -1 });
output = array_ops.reshape(output, new int[] { -1, output_shape[-1].numpy() });
}
return tf.nn.sparse_softmax_cross_entropy_with_logits(target, output);
}
}
}
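
For reference, the deleted wrapper has two call paths: Apply() performs the rank adjustment shown above (labels and logits are flattened when label rank is not logits rank - 1), and the inherited Call() additionally reduces with SUM_OVER_BATCH_SIZE. A sketch, with yTrue (integer labels) and yPred (logits) assumed to be existing tensors:

var scce = new SparseCategoricalCrossentropy(from_logits: true);
var perExample = scce.Apply(yTrue, yPred);   // per-example sparse softmax cross-entropy
var batchLoss  = scce.Call(yTrue, yPred);    // compute_weighted_loss(..., SUM_OVER_BATCH_SIZE)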

+ 0  - 14  src/TensorFlowNET.Core/Keras/Metrics/Mean.cs

@@ -1,14 +0,0 @@
namespace Tensorflow.Keras.Metrics
{
/// <summary>
/// Computes the (weighted) mean of the given values.
/// </summary>
public class Mean : Reduce
{
public Mean(string name = "mean", TF_DataType dtype = TF_DataType.TF_FLOAT)
: base(Reduction.WEIGHTED_MEAN, name, dtype: dtype)
{

}
}
}

+ 0  - 27  src/TensorFlowNET.Core/Keras/Metrics/MeanMetricWrapper.cs

@@ -1,27 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Metrics
{
public class MeanMetricWrapper : Mean
{
string name;
Func<Tensor, Tensor, Tensor> _fn = null;
public MeanMetricWrapper(Func<Tensor, Tensor, Tensor> fn, string name, TF_DataType dtype = TF_DataType.TF_FLOAT)
: base(name: name, dtype: dtype)
{
_fn = fn;
}

public override Tensor update_state(Tensor y_true, Tensor y_pred, Tensor sample_weight = null)
{
y_true = math_ops.cast(y_true, _dtype);
y_pred = math_ops.cast(y_pred, _dtype);

var matches = _fn(y_true, y_pred);
return update_state(matches, sample_weight: sample_weight);
}
}
}

+ 0  - 62  src/TensorFlowNET.Core/Keras/Metrics/Metric.cs

@@ -1,62 +0,0 @@
using System;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Metrics
{
/// <summary>
/// Encapsulates metric logic and state.
/// </summary>
public class Metric : Layer
{
protected IVariableV1 total;
protected IVariableV1 count;
protected string _reduction;
protected TF_DataType _dtype;

public Metric(string name = null, TF_DataType dtype = TF_DataType.DtInvalid)
: base(new LayerArgs
{
Name = name,
DType = dtype
})
{
stateful = true;
built = true;
}

protected override IVariableV1 add_weight(string name,
TensorShape shape = null,
TF_DataType dtype = TF_DataType.TF_FLOAT,
IInitializer initializer = null,
IRegularizer regularizer = null,
VariableSynchronization synchronization = VariableSynchronization.OnRead,
VariableAggregation aggregation = VariableAggregation.Sum,
bool trainable = true,
Func<VariableArgs, IVariableV1> getter = null)
{
if (shape == null)
shape = new TensorShape(new int[0]);

return tf_with(ops.init_scope(), delegate
{
return base.add_weight(name, shape,
dtype: dtype,
trainable: false,
initializer: initializer,
synchronization: synchronization,
aggregation: aggregation);
});
}

public virtual Tensor update_state(Tensor y_true, Tensor y_pred, Tensor sample_weight = null)
=> throw new NotImplementedException("");

public virtual Tensor result()
=> throw new NotImplementedException("");

public override string ToString()
=> $"{name} {(float)total.numpy()}/{(float)count.numpy()}";
}
}

+ 0  - 74  src/TensorFlowNET.Core/Keras/Metrics/Reduce.cs

@@ -1,74 +0,0 @@
using Tensorflow.Keras.Losses;
using Tensorflow.Keras.Utils;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Metrics
{
/// <summary>
/// Encapsulates metrics that perform a reduce operation on the values.
/// </summary>
public class Reduce : Metric
{
public Reduce(string reduction, string name, TF_DataType dtype = TF_DataType.DtInvalid)
: base(name: name, dtype: dtype)
{
_reduction = reduction;
_dtype = dtype;
total = add_weight("total", initializer: tf.zeros_initializer);

if (reduction == Reduction.WEIGHTED_MEAN ||
reduction == Reduction.SUM_OVER_BATCH_SIZE)
{
count = add_weight("count", initializer: tf.zeros_initializer);
}
}

public Tensor update_state(Tensor values, Tensor sample_weight = null)
{
if (sample_weight != null)
{
(values, sample_weight) = losses_utils.squeeze_or_expand_dimensions(
values, sample_weight: sample_weight);

sample_weight = math_ops.cast(sample_weight, dtype: values.dtype);
values = math_ops.multiply(values, sample_weight);
}

Tensor update_total_op = null;
var value_sum = math_ops.reduce_sum(values);
tf_with(ops.control_dependencies(new[] { value_sum }), ctl =>
{
update_total_op = total.assign_add(value_sum);
});

// Exit early if the reduction doesn't have a denominator.
if (_reduction == Reduction.SUM)
return update_total_op;

// Update `count` for reductions that require a denominator.
Tensor num_values = null;
if (_reduction == Reduction.SUM_OVER_BATCH_SIZE)
num_values = math_ops.cast(array_ops.size(values), _dtype);
else if (_reduction == ReductionV2.WEIGHTED_MEAN)
{
if (sample_weight == null)
num_values = math_ops.cast(array_ops.size(values), _dtype);
else
num_values = math_ops.reduce_sum(sample_weight);
}

return tf_with(ops.control_dependencies(new[] { update_total_op }), ctl
=> count.assign_add(num_values));
}

public override Tensor result()
{
if (_reduction == Reduction.SUM)
return array_ops.identity(total.AsTensor());
else if (_reduction == Reduction.WEIGHTED_MEAN || _reduction == Reduction.SUM_OVER_BATCH_SIZE)
return math_ops.div_no_nan(total.AsTensor(), count.AsTensor());

return base.result();
}
}
}
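
A worked example of the bookkeeping in the deleted update_state/result pair, for the WEIGHTED_MEAN case:

// values = [1, 2, 3], sample_weight = [1, 1, 2]
// update_state: values *= sample_weight        -> [1, 2, 6]
//               total  += reduce_sum(values)    = 9
//               count  += reduce_sum(weight)    = 4
// result:       div_no_nan(total, count)        = 2.25
// (for Reduction.SUM only total is tracked; count is never created.)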

+ 0  - 6  src/TensorFlowNET.Core/Keras/Metrics/Sum.cs

@@ -1,6 +0,0 @@
namespace Tensorflow.Keras.Metrics
{
class Sum
{
}
}

+ 0  - 210  src/TensorFlowNET.Core/Layers/Layer.cs

@@ -1,210 +0,0 @@
/*****************************************************************************
Copyright 2018 The TensorFlow.NET Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
******************************************************************************/

using System;
using System.Collections.Generic;
using Tensorflow.Keras.ArgsDefinition;
using static Tensorflow.Binding;

namespace Tensorflow.Layers
{
public class Layer : Keras.Engine.Layer
{
protected Graph _graph;

protected VariableScope _scope;
protected VariableScope _current_scope;

protected bool? _reuse;
protected bool _use_resource_variables;
protected bool _keras_style;

public Layer(bool trainable = true,
string name = null,
TF_DataType dtype = TF_DataType.DtInvalid,
bool? _reuse = null) :
base(new LayerArgs
{
Trainable = trainable,
Name = name,
DType = dtype
})
{
// For backwards compatibility, legacy layers do not use `ResourceVariable`
// by default.
this._use_resource_variables = false;
this._reuse = _reuse;

// Avoid an incorrect lint error
trainable_weights = new List<IVariableV1>();
non_trainable_weights = new List<IVariableV1>();
this.built = false;
_keras_style = false;
}

public virtual (Tensor, Tensor) apply(Tensor inputs, Tensor training = null)
{
var results = __call__(inputs, training: training);
return (results[0], results[1]);
}

public Tensors __call__(Tensors inputs,
Tensor state = null,
Tensor training = null,
VariableScope scope = null)
{
_set_scope(scope);
_graph = ops._get_graph_from_inputs(inputs, graph: _graph);

variable_scope scope_context_manager = null;
if (built)
{
scope_context_manager = tf.variable_scope(_scope,
reuse: true,
auxiliary_name_scope: false);
}
else
{
scope_context_manager = tf.variable_scope(_scope,
reuse: _reuse,
auxiliary_name_scope: false);
}

Tensors outputs = null;
tf_with(scope_context_manager, scope2 =>
{
_current_scope = scope2;
// Actually call layer
outputs = base.Apply(inputs,
state: state,
is_training: training == null ? false : false);
});


// Update global default collections.
_add_elements_to_collection(updates.ToArray(), new string[] { tf.GraphKeys.UPDATE_OPS });

return outputs;
}

protected virtual void _add_elements_to_collection(Operation[] elements, string[] collection_list)
{
foreach (var name in collection_list)
{
var collection = ops.get_collection_ref<Operation>(name);

foreach (var element in elements)
if (!collection.Contains(element))
collection.Add(element);
}
}

/// <summary>
/// Adds a new variable to the layer, or gets an existing one; returns it.
/// </summary>
/// <param name="name"></param>
/// <param name="shape"></param>
/// <param name="dtype"></param>
/// <param name="initializer"></param>
/// <param name="trainable"></param>
/// <param name="synchronization"></param>
/// <param name="aggregation"></param>
/// <returns></returns>
protected virtual IVariableV1 add_weight(string name,
int[] shape,
TF_DataType dtype = TF_DataType.DtInvalid,
IInitializer initializer = null,
bool trainable = true,
VariableSynchronization synchronization = VariableSynchronization.Auto,
VariableAggregation aggregation = VariableAggregation.None)
{
var default_graph = ops.get_default_graph();
Graph init_graph = null;
IVariableV1[] existing_variables = null;

if (synchronization == VariableSynchronization.OnRead)
trainable = false;

if (default_graph.building_function)
{
throw new NotImplementedException("add_weight");
}
else
{
init_graph = default_graph;
existing_variables = variables.global_variables().ToArray();
}

if (dtype == TF_DataType.DtInvalid)
dtype = TF_DataType.TF_FLOAT;

_set_scope();
var reuse = built || (_reuse != null && _reuse.Value);
return tf_with(tf.variable_scope(_scope,
reuse: reuse,
auxiliary_name_scope: false), scope =>
{
_current_scope = scope;
return tf_with(ops.name_scope(_name_scope()), delegate
{
var variable = base.add_weight(name,
shape,
dtype: dtype,
initializer: initializer,
trainable: trainable,
getter: (args) =>
tf.compat.v1.get_variable(args.Name,
shape: args.Shape,
dtype: args.DType,
initializer: args.Initializer,
trainable: args.Trainable)
);

//if (init_graph != null)
//var trainable_variables = variables.trainable_variables();

return variable;
});
});
}

protected override string _name_scope()
{
return _current_scope.original_name_scope;
}

protected void _set_scope(VariableScope scope = null)
{
if (_scope == null)
{
if (_reuse.HasValue && _reuse.Value)
{
throw new NotImplementedException("_set_scope _reuse.HasValue");
/*with(tf.variable_scope(scope == null ? _base_name : scope),
captured_scope => _scope = captured_scope);*/
}
else
{
tf_with(tf.variable_scope(scope, default_name: base_name), captured_scope =>
{
// convert variable_scope to VariableScope
_scope = captured_scope;
});
}
}
}
}
}

+ 1  - 1  src/TensorFlowNET.Core/Operations/NnOps/BasicLSTMCell.cs

@@ -71,7 +71,7 @@ namespace Tensorflow
/// <param name="training"></param>
/// <param name="state"></param>
/// <returns></returns>
protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
protected Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
{
var one = constant_op.constant(1, dtype: dtypes.int32);
// Parameters of gates are concatenated into one multiply for efficiency.


+ 1  - 1  src/TensorFlowNET.Core/Operations/NnOps/BasicRNNCell.cs

@@ -66,7 +66,7 @@ namespace Tensorflow
built = true;
}

protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
protected Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
{
// Most basic RNN: output = new_state = act(W * input + U * state + B).
var concat = array_ops.concat(new Tensor[] { inputs, state }, 1);


+ 1  - 1  src/TensorFlowNET.Core/Operations/NnOps/ConvolutionInternal.cs

@@ -22,7 +22,7 @@ using static Tensorflow.Binding;

namespace Tensorflow.Operations
{
internal class ConvolutionInternal
public class ConvolutionInternal
{
ConvolutionalArgs args;



+ 151  - 3  src/TensorFlowNET.Core/Operations/NnOps/LayerRNNCell.cs

@@ -13,17 +13,165 @@
See the License for the specific language governing permissions and
limitations under the License.
******************************************************************************/
using static Tensorflow.Binding;
using Tensorflow.Keras.Engine;
using System;

namespace Tensorflow
{
public class LayerRnnCell : RnnCell
{
public LayerRnnCell(bool? _reuse = null,
string name = null,
TF_DataType dtype = TF_DataType.DtInvalid) : base(_reuse: _reuse,
protected InputSpec inputSpec;
protected bool built;
protected Graph _graph;

protected VariableScope _scope;
protected VariableScope _current_scope;

protected bool? _reuse;
protected bool _use_resource_variables;
protected bool _keras_style;

public LayerRnnCell(bool trainable = true,
string name = null,
TF_DataType dtype = TF_DataType.DtInvalid,
bool? _reuse = null) : base(_reuse: _reuse,
name: name,
dtype: dtype)
{
// For backwards compatibility, legacy layers do not use `ResourceVariable`
// by default.
this._use_resource_variables = false;
this._reuse = _reuse;

// Avoid an incorrect lint error
this.built = false;
_keras_style = false;
}

protected virtual void build(TensorShape inputs_shape)
{

}

public virtual (Tensor, Tensor) apply(Tensor inputs, Tensor training = null)
{
var results = __call__(inputs, training: training);
return (results[0], results[1]);
}

public Tensors __call__(Tensors inputs,
Tensor state = null,
Tensor training = null,
VariableScope scope = null)
{
_set_scope(scope);
_graph = ops._get_graph_from_inputs(inputs, graph: _graph);

variable_scope scope_context_manager = null;
if (built)
{
scope_context_manager = tf.variable_scope(_scope,
reuse: true,
auxiliary_name_scope: false);
}
else
{
scope_context_manager = tf.variable_scope(_scope,
reuse: _reuse,
auxiliary_name_scope: false);
}

Tensors outputs = null;
tf_with(scope_context_manager, scope2 =>
{
_current_scope = scope2;
// Actually call layer

});


// Update global default collections.

return outputs;
}

protected virtual void _add_elements_to_collection(Operation[] elements, string[] collection_list)
{
foreach (var name in collection_list)
{
var collection = ops.get_collection_ref<Operation>(name);

foreach (var element in elements)
if (!collection.Contains(element))
collection.Add(element);
}
}

/// <summary>
/// Adds a new variable to the layer, or gets an existing one; returns it.
/// </summary>
/// <param name="name"></param>
/// <param name="shape"></param>
/// <param name="dtype"></param>
/// <param name="initializer"></param>
/// <param name="trainable"></param>
/// <param name="synchronization"></param>
/// <param name="aggregation"></param>
/// <returns></returns>
protected virtual IVariableV1 add_weight(string name,
int[] shape,
TF_DataType dtype = TF_DataType.DtInvalid,
IInitializer initializer = null,
bool trainable = true,
VariableSynchronization synchronization = VariableSynchronization.Auto,
VariableAggregation aggregation = VariableAggregation.None)
{
var default_graph = ops.get_default_graph();
Graph init_graph = null;
IVariableV1[] existing_variables = null;

if (synchronization == VariableSynchronization.OnRead)
trainable = false;

if (default_graph.building_function)
{
throw new NotImplementedException("add_weight");
}
else
{
init_graph = default_graph;
existing_variables = variables.global_variables().ToArray();
}

if (dtype == TF_DataType.DtInvalid)
dtype = TF_DataType.TF_FLOAT;

_set_scope();
var reuse = built || (_reuse != null && _reuse.Value);
return tf.Variable(0);
}

protected string _name_scope()
{
return _current_scope.original_name_scope;
}

protected void _set_scope(VariableScope scope = null)
{
if (_scope == null)
{
if (_reuse.HasValue && _reuse.Value)
{
throw new NotImplementedException("_set_scope _reuse.HasValue");
/*with(tf.variable_scope(scope == null ? _base_name : scope),
captured_scope => _scope = captured_scope);*/
}
else
{

}
}
}
}
}

+ 28  - 5  src/TensorFlowNET.Core/Operations/NnOps/RNNCell.cs

@@ -15,6 +15,9 @@
******************************************************************************/

using System;
using System.Collections.Generic;
using Tensorflow.Keras;
using Tensorflow.Keras.Engine;
using Tensorflow.Operations;
using Tensorflow.Util;
using static Tensorflow.Binding;
@@ -42,7 +45,7 @@ namespace Tensorflow
/// matching structure of Tensors having shape `[batch_size].concatenate(s)`
/// for each `s` in `self.batch_size`.
/// </summary>
public abstract class RnnCell : Layers.Layer
public abstract class RnnCell : ILayer
{
/// <summary>
/// Attribute that indicates whether the cell is a TF RNN cell, due the slight
@@ -52,14 +55,24 @@ namespace Tensorflow
public virtual object state_size { get; }

public virtual int output_size { get; }
public string Name { get => throw new NotImplementedException(); set => throw new NotImplementedException(); }

public List<INode> InboundNodes => throw new NotImplementedException();

public List<INode> OutboundNodes => throw new NotImplementedException();

public List<ILayer> Layers => throw new NotImplementedException();

public bool Trainable => throw new NotImplementedException();

public List<IVariableV1> trainable_variables => throw new NotImplementedException();

public TensorShape output_shape => throw new NotImplementedException();

public RnnCell(bool trainable = true,
string name = null,
TF_DataType dtype = TF_DataType.DtInvalid,
bool? _reuse = null) : base(trainable: trainable,
name: name,
dtype: dtype,
_reuse: _reuse)
bool? _reuse = null)
{
_is_tf_rnn_cell = true;
}
@@ -109,5 +122,15 @@ namespace Tensorflow

throw new NotImplementedException("_zero_state_tensors");
}

public Tensors Apply(Tensors inputs, Tensor state = null, bool is_training = false)
{
throw new NotImplementedException();
}

public int count_params()
{
throw new NotImplementedException();
}
}
}

+ 2  - 2  src/TensorFlowNET.Core/Operations/NnOps/rnn.cs

@@ -363,8 +363,8 @@ namespace Tensorflow.Operations
Tensor[] outputs = null;
if (sequence_length != null)
throw new NotImplementedException("sequence_length != null");
else
outputs = cell.__call__(input_t_t, state: state1);
/*else
outputs = cell.__call__(input_t_t, state: state1);*/

var (output, new_state) = (outputs[0], outputs[1]);
// Keras cells always wrap state as list, even if it's a single tensor.


+ 1  - 1  src/TensorFlowNET.Core/Operations/nn_ops.cs

@@ -24,7 +24,7 @@ namespace Tensorflow
{
public class nn_ops
{
internal static ConvolutionInternal convolution_internal(string padding,
public static ConvolutionInternal convolution_internal(string padding,
int[] strides,
int[] dilation_rate,
string name = null,


+ 0  - 4  src/TensorFlowNET.Core/Tensorflow.Binding.csproj

@@ -87,8 +87,4 @@ TensorFlow .NET v0.30 is focused on making more Keras API work including:
<PackageReference Include="NumSharp.Lite" Version="0.1.9" />
<PackageReference Include="Protobuf.Text" Version="0.4.0" />
</ItemGroup>

<ItemGroup>
<Folder Include="Keras\Initializers\" />
</ItemGroup>
</Project>

+ 0  - 10  src/TensorFlowNET.Keras/Activations.cs

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras
{
class Activations
{
}
}

src/TensorFlowNET.Core/Keras/Activations/Activations.Linear.cs → src/TensorFlowNET.Keras/Activations/Activations.Linear.cs


src/TensorFlowNET.Core/Keras/Activations/Activations.Relu.cs → src/TensorFlowNET.Keras/Activations/Activations.Relu.cs


src/TensorFlowNET.Core/Keras/Activations/Activations.Sigmoid.cs → src/TensorFlowNET.Keras/Activations/Activations.Sigmoid.cs


src/TensorFlowNET.Core/Keras/Activations/Activations.Tanh.cs → src/TensorFlowNET.Keras/Activations/Activations.Tanh.cs


+ 0  - 35  src/TensorFlowNET.Keras/Applications/Densenet.cs

@@ -1,35 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class Densenet
{
public static Tensor dense_block(Tensor x, int blocks, string name) => throw new NotImplementedException();

public static Tensor transition_block(Tensor x, float reduction, string name) => throw new NotImplementedException();

public static Tensor conv_block(Tensor x, float growth_rate, string name) => throw new NotImplementedException();

public static Model DenseNet(int blocks, bool include_top=true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model DenseNet121(int blocks, bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model DenseNet169(int blocks, bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model DenseNet201(int blocks, bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();

public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
}
}

+ 0  - 60  src/TensorFlowNET.Keras/Applications/Efficientnet.cs

@@ -1,60 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class BlockArg
{

}

public class Efficientnet
{
public static Model EfficientNet(float width_coefficient, float depth_coefficient, int default_size, float dropout_rate = 0.2f,
float drop_connect_rate = 0.2f, int depth_divisor = 8, string activation = "swish",
BlockArg[] blocks_args = null, string model_name = "efficientnet", bool include_top = true,
string weights = "imagenet", Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Tensor block(Tensor inputs, string activation= "swish", float drop_rate= 0f,string name= "",
int filters_in= 32, int filters_out= 16, int kernel_size= 3, int strides= 1,
int expand_ratio= 1, float se_ratio= 0, bool id_skip= true) => throw new NotImplementedException();

public static Model EfficientNetB0(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model EfficientNetB1(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model EfficientNetB2(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model EfficientNetB3(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model EfficientNetB4(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model EfficientNetB5(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model EfficientNetB6(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model EfficientNetB7(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();

public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
}
}

+ 0  - 22  src/TensorFlowNET.Keras/Applications/ImagenetUtils.cs

@@ -1,22 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class ImagenetUtils
{
public static Tensor preprocess_input(Tensor x, string data_format= null, string mode= "caffe") => throw new NotImplementedException();
public static Tensor decode_predictions(Tensor preds, int top= 5) => throw new NotImplementedException();

public static Tensor _preprocess_numpy_input(Tensor x, string data_format, string mode) => throw new NotImplementedException();

public static Tensor _preprocess_symbolic_input(Tensor x, string data_format, string mode) => throw new NotImplementedException();

public static TensorShape obtain_input_shape(TensorShape input_shape, int default_size, int min_size,
string data_format, bool require_flatten, string weights= null) => throw new NotImplementedException();

public static ((int, int), (int, int)) correct_pad(Tensor inputs, (int, int) kernel_size) => throw new NotImplementedException();
}
}

+ 0  - 22  src/TensorFlowNET.Keras/Applications/InceptionResnetV2.cs

@@ -1,22 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class InceptionResnetV2
{
public static Model InceptionResNetV2(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Tensor conv2d_bn(Tensor x, int filters, (int, int) kernel_size, (int, int) strides, string padding= "same",
string activation= "relu", bool use_bias= false, string name= null) => throw new NotImplementedException();

public static Tensor inception_resnet_block(Tensor x, float scale, string block_type, int block_idx, string activation= "relu") => throw new NotImplementedException();

public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();

public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
}
}

+ 0  - 19  src/TensorFlowNET.Keras/Applications/InceptionV3.cs

@@ -1,19 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class InceptionV3
{
public static Model Inceptionv3(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Tensor conv2d_bn(Tensor x, int filters, int num_row, int num_col, string padding = "same", (int, int)? strides = null, string name = null) => throw new NotImplementedException();

public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();

public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
}
}

+ 0  - 18  src/TensorFlowNET.Keras/Applications/Mobilenet.cs

@@ -1,18 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class Mobilenet
{
public static Model MobileNet(TensorShape input_shape= null, float alpha= 1.0f, int depth_multiplier= 1, float dropout= 1e-3f,
bool include_top= true, string weights= "imagenet", Tensor input_tensor= null, string pooling= null, int classes= 1000) => throw new NotImplementedException();

public static Tensor conv2d_bn(Tensor x, int filters, float alpha, (int, int)? kernel = null, (int, int)? strides = null) => throw new NotImplementedException();

public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();

public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
}
}

+ 0  - 21  src/TensorFlowNET.Keras/Applications/MobilenetV2.cs

@@ -1,21 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class MobilenetV2
{
public static Model MobileNetV2(TensorShape input_shape = null, float alpha = 1.0f, bool include_top = true,
string weights = "imagenet", Tensor input_tensor = null, string pooling = null,
int classes = 1000) => throw new NotImplementedException();

public static Tensor _inverted_res_block(Tensor inputs, int expansion, (int, int) stride, float alpha, int filters, string block_id) => throw new NotImplementedException();

public static Tensor _make_divisible(Tensor v, Tensor divisor, Tensor min_value= null) => throw new NotImplementedException();

public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();

public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
}
}

+ 0
- 31
src/TensorFlowNET.Keras/Applications/Nasnet.cs View File

@@ -1,31 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class Nasnet
{
public static Model NASNet(TensorShape input_shape = null, int penultimate_filters = 4032, int num_blocks = 6, int stem_block_filters = 96,
bool skip_reduction = true, int filter_multiplier = 2, bool include_top = true, string weights = null,
Tensor input_tensor = null, string pooling = null, int classes = 1000, int? default_size = null) => throw new NotImplementedException();

public static Model NASNetMobile(TensorShape input_shape = null, bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model NASNetLarge(TensorShape input_shape = null, bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Tensor _separable_conv_block(Tensor ip, int filters, (int, int)? kernel_size= null, (int, int)? strides= null, string block_id= null) => throw new NotImplementedException();

public static Tensor _adjust_block(Tensor p, Tensor ip, int filters, string block_id= null) => throw new NotImplementedException();

public static Tensor _normal_a_cell(Tensor p, Tensor ip, int filters, string block_id = null) => throw new NotImplementedException();

public static Tensor _reduction_a_cell(Tensor p, Tensor ip, int filters, string block_id = null) => throw new NotImplementedException();

public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();

public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
}
}

+ 0
- 41
src/TensorFlowNET.Keras/Applications/Resnet.cs View File

@@ -1,41 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class Resnet
{
public static Model ResNet(Func<Tensor, Tensor> stack_fn, bool preact, bool use_bias, string model_name= "resnet", bool include_top= true,
string weights= "imagenet", Tensor input_tensor= null, TensorShape input_shape= null, string pooling= null,
int classes= 1000) => throw new NotImplementedException();

public static Tensor block1(Tensor x, int filters, int kernel_size= 3, int stride= 1, bool conv_shortcut= true, string name= null) => throw new NotImplementedException();

public static Tensor stack1(Tensor x, int filters, int blocks, int stride1 = 2, string name = null) => throw new NotImplementedException();

public static Tensor block2(Tensor x, int filters, int kernel_size = 3, int stride = 1, bool conv_shortcut = true, string name = null) => throw new NotImplementedException();

public static Tensor stack2(Tensor x, int filters, int blocks, int stride1 = 2, string name = null) => throw new NotImplementedException();

public static Tensor block3(Tensor x, int filters, int kernel_size = 3, int stride = 1, int groups = 32, bool conv_shortcut = true, string name = null) => throw new NotImplementedException();

public static Tensor stack3(Tensor x, int filters, int blocks, int stride1 = 2, int groups = 32, string name = null) => throw new NotImplementedException();

public static Model ResNet50(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model ResNet101(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model ResNet152(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();

public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
}
}

+ 0
- 25
src/TensorFlowNET.Keras/Applications/ResnetV2.cs View File

@@ -1,25 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class ResnetV2
{
public static Model ResNet50V2(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model ResNet101V2(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Model ResNet152V2(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();

public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
}
}

+ 0
- 17
src/TensorFlowNET.Keras/Applications/Vgg16.cs View File

@@ -1,17 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class Vgg16
{
public static Model VGG16(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();

public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
}
}

+ 0
- 17
src/TensorFlowNET.Keras/Applications/Vgg19.cs View File

@@ -1,17 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class Vgg19
{
public static Model VGG19(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();

public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
}
}

+ 0
- 17
src/TensorFlowNET.Keras/Applications/Xception.cs View File

@@ -1,17 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Applications
{
public class Xception
{
public static Model XCeption(bool include_top = true, string weights = "imagenet",
Tensor input_tensor = null, TensorShape input_shape = null,
string pooling = null, int classes = 1000) => throw new NotImplementedException();

public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();

public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
}
}

+ 0
- 29
src/TensorFlowNET.Keras/Args.cs View File

@@ -1,29 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras
{
public class Args
{
private List<object> args = new List<object>();

public object this[int index]
{
get
{
return index < args.Count ? args[index] : null;
}
}

public T Get<T>(int index)
{
return index < args.Count ? (T)args[index] : default(T);
}

public void Add<T>(T arg)
{
args.Add(arg);
}
}
}
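
For reference, a minimal usage sketch of this removed Args helper, assuming the bounds checks read index < args.Count as corrected above; it only compiles against the pre-split binding assembly that still contained the class:

using Tensorflow.Keras;

var args = new Args();
args.Add(32);        // e.g. a batch size
args.Add("relu");    // e.g. an activation name

var batchSize = args.Get<int>(0);       // 32
var activation = args.Get<string>(1);   // "relu"
var missing = args[5];                  // out-of-range index yields null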

+ 0
- 10
src/TensorFlowNET.Keras/Backend.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras
{
class Backend
{
}
}

src/TensorFlowNET.Core/Keras/BackendBase.cs → src/TensorFlowNET.Keras/BackendBase.cs View File


+ 0
- 10
src/TensorFlowNET.Keras/BackendConfig.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras
{
class BackendConfig
{
}
}

src/TensorFlowNET.Core/Keras/BackendImpl.cs → src/TensorFlowNET.Keras/BackendImpl.cs View File


+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/BaseLogger.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class BaseLogger
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/CSVLogger.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class CSVLogger
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/Callback.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class Callback
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/CallbackList.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class CallbackList
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/EarlyStopping.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class EarlyStopping
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/History.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class History
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/LambdaCallback.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class LambdaCallback
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/LearningRateScheduler.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class LearningRateScheduler
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/ModelCheckpoint.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class ModelCheckpoint
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/ProgbarLogger.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class ProgbarLogger
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/ReduceLROnPlateau.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class ReduceLROnPlateau
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/RemoteMonitor.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class RemoteMonitor
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/TensorBoard.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class TensorBoard
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/TensorBoardV1.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class TensorBoardV1
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Callbacks/TerminateOnNaN.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Callbacks
{
class TerminateOnNaN
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Constraints/ConstraintBase.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Constraints
{
public abstract class ConstraintBase
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Constraints/MaxNorm.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Constraints
{
class MaxNorm
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Constraints/MinMaxNorm.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Constraints
{
class MinMaxNorm
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Constraints/NonNeg.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Constraints
{
class NonNeg
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Constraints/RadialConstraint.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Constraints
{
class RadialConstraint
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Constraints/UnitNorm.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Constraints
{
class UnitNorm
{
}
}

+ 0
- 13
src/TensorFlowNET.Keras/Core.cs View File

@@ -1,13 +0,0 @@
using Tensorflow;
using static Tensorflow.Binding;

namespace Keras
{
public static class Keras
{
public static Tensor create_tensor(int[] shape, float mean = 0, float stddev = 1, TF_DataType dtype = TF_DataType.TF_FLOAT, int? seed = null, string name = null)
{
return tf.truncated_normal(shape: shape, mean: mean, stddev: stddev, dtype: dtype, seed: seed, name: name);
}
}
}
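
As an illustration only, a short sketch of how this removed helper was called before the split; the static class Keras sits in the Keras namespace, per the deleted file above:

using static Keras.Keras;

// Draw a 2x3 tensor from a truncated normal distribution (mean 0, stddev 1),
// i.e. the thin wrapper around tf.truncated_normal shown in the deleted file.
var weights = create_tensor(new[] { 2, 3 }, mean: 0f, stddev: 1f);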

+ 0
- 11
src/TensorFlowNET.Keras/Datasets/BostonHousing.cs View File

@@ -1,11 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Datasets
{
public class BostonHousing
{
public static ((Tensor, Tensor), (Tensor, Tensor)) load_data(string path = "boston_housing.npz", float test_split = 0.2f, int seed = 113) => throw new NotImplementedException();
}
}

+ 0
- 11
src/TensorFlowNET.Keras/Datasets/Cifar.cs View File

@@ -1,11 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Datasets
{
public class Cifar
{
public (Tensor, Tensor) load_batch(string fpath, string label_key = "labels") => throw new NotImplementedException();
}
}

+ 0
- 11
src/TensorFlowNET.Keras/Datasets/Cifar10.cs View File

@@ -1,11 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Datasets
{
public class Cifar10
{
public static ((Tensor, Tensor), (Tensor, Tensor)) load_data() => throw new NotImplementedException();
}
}

+ 0
- 11
src/TensorFlowNET.Keras/Datasets/Cifar100.cs View File

@@ -1,11 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Datasets
{
public class Cifar100
{
public static ((Tensor, Tensor), (Tensor, Tensor)) load_data(string label_mode = "fine") => throw new NotImplementedException();
}
}

src/TensorFlowNET.Core/Keras/Datasets/DatasetPass.cs → src/TensorFlowNET.Keras/Datasets/DatasetPass.cs View File


+ 0
- 11
src/TensorFlowNET.Keras/Datasets/FashionMNIST.cs View File

@@ -1,11 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Datasets
{
public class FashionMNIST
{
public static ((Tensor, Tensor), (Tensor, Tensor)) load_data() => throw new NotImplementedException();
}
}

+ 0
- 15
src/TensorFlowNET.Keras/Datasets/IMDB.cs View File

@@ -1,15 +0,0 @@
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Datasets
{
public class IMDB
{
public static ((Tensor, Tensor), (Tensor, Tensor)) load_data(string path= "imdb.npz", int? num_words= null, int skip_top= 0, int? maxlen= null,
int seed= 113,int start_char= 1, int oov_char= 2, int index_from= 3) => throw new NotImplementedException();

public static JObject get_word_index(string path= "imdb_word_index.json") => throw new NotImplementedException();
}
}

src/TensorFlowNET.Core/Keras/Datasets/KerasDataset.cs → src/TensorFlowNET.Keras/Datasets/KerasDataset.cs View File


+ 68
- 5
src/TensorFlowNET.Keras/Datasets/MNIST.cs View File

@@ -1,11 +1,74 @@
using System;
using System.Collections.Generic;
using System.Text;
/*****************************************************************************
Copyright 2020 Haiping Chen. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
******************************************************************************/

using NumSharp;
using System;
using System.IO;
using System.Net;

namespace Tensorflow.Keras.Datasets
{
public class MNIST
public class Mnist
{
public static ((Tensor, Tensor), (Tensor, Tensor)) load_data(string path = "mnist.npz") => throw new NotImplementedException();
string origin_folder = "https://storage.googleapis.com/tensorflow/tf-keras-datasets/";
string file_name = "mnist.npz";

/// <summary>
/// Loads the [MNIST dataset](http://yann.lecun.com/exdb/mnist/).
/// </summary>
/// <returns></returns>
public DatasetPass load_data()
{
var file = Download();
var bytes = File.ReadAllBytes(file);
var datax = LoadX(bytes);
var datay = LoadY(bytes);
return new DatasetPass
{
Train = (datax.Item1, datay.Item1),
Test = (datax.Item2, datay.Item2)
};
}

(NDArray, NDArray) LoadX(byte[] bytes)
{
var y = np.Load_Npz<byte[,,]>(bytes);
return (y["x_train.npy"], y["x_test.npy"]);
}

(NDArray, NDArray) LoadY(byte[] bytes)
{
var y = np.Load_Npz<byte[]>(bytes);
return (y["y_train.npy"], y["y_test.npy"]);
}

string Download()
{
var fileSaveTo = Path.Combine(Path.GetTempPath(), file_name);

if (File.Exists(fileSaveTo))
{
Console.WriteLine($"The file {fileSaveTo} already exists");
return fileSaveTo;
}

using var wc = new WebClient();
wc.DownloadFileTaskAsync(origin_folder + file_name, fileSaveTo).Wait();

return fileSaveTo;
}
}
}
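
A minimal usage sketch of the reworked loader, assuming the relocated DatasetPass exposes Train and Test as (NDArray, NDArray) tuples, as the construction above suggests:

using Tensorflow.Keras.Datasets;

// Downloads mnist.npz to the temp folder on first use, then loads it from disk.
var mnist = new Mnist();
var data = mnist.load_data();

var (xTrain, yTrain) = data.Train;   // training images and labels as NDArrays
var (xTest, yTest) = data.Test;      // test images and labels as NDArrays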

+ 0
- 12
src/TensorFlowNET.Keras/Datasets/Reuters.cs View File

@@ -1,12 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Datasets
{
public class Reuters
{
public static ((Tensor, Tensor), (Tensor, Tensor)) load_data(string path = "reuters.npz", int? num_words= null, int skip_top= 0,
int? maxlen= null,float test_split= 0.2f, int seed= 113,int start_char= 1,int oov_char= 2,int index_from= 3) => throw new NotImplementedException();
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Distribute/DistributedTrainingUtils.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Distribute
{
class DistributedTrainingUtils
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Distribute/KerasCorrectnessTestBase.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Distribute
{
class KerasCorrectnessTestBase
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Distribute/KerasDnnCorrectnessTest.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Distribute
{
class KerasDnnCorrectnessTest
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Distribute/KerasEmbeddingModelCorrectnessTest.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Distribute
{
class KerasEmbeddingModelCorrectnessTest
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Distribute/KerasImageModelCorrectnessTest.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Distribute
{
class KerasImageModelCorrectnessTest
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Distribute/KerasOptimizerV2Test.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Distribute
{
class KerasOptimizerV2Test
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Distribute/KerasPremadeModelsTest.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Distribute
{
class KerasPremadeModelsTest
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Distribute/KerasRnnModelCorrectnessTest.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Distribute
{
class KerasRnnModelCorrectnessTest
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Distribute/KerasStatefulLstmModelCorrectnessTest.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Distribute
{
class KerasStatefulLstmModelCorrectnessTest
{
}
}

+ 0
- 10
src/TensorFlowNET.Keras/Distribute/KerasUtilsTest.cs View File

@@ -1,10 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.Distribute
{
class KerasUtilsTest
{
}
}

Some files were not shown because too many files changed in this diff
