Browse Source

Automatically register KerasInterface with tf (tf.keras) on construction.

pull/998/head
Yaohui Liu 2 years ago
parent
commit
469d15e063
No known key found for this signature in database GPG Key ID: E86D01E1809BD23E
12 changed files with 132 additions and 28 deletions
  1. +0
    -2
      src/TensorFlowNET.Console/Program.cs
  2. +39
    -5
      src/TensorFlowNET.Core/Keras/IKerasApi.cs
  3. +47
    -0
      src/TensorFlowNET.Core/Keras/IOptimizerApi.cs
  4. +12
    -0
      src/TensorFlowNET.Core/Keras/Models/IModelsApi.cs
  5. +0
    -8
      src/TensorFlowNET.Core/tensorflow.cs
  6. +1
    -1
      src/TensorFlowNET.Keras/KerasApi.cs
  7. +24
    -2
      src/TensorFlowNET.Keras/KerasInterface.cs
  8. +2
    -2
      src/TensorFlowNET.Keras/Models/ModelsApi.cs
  9. +5
    -4
      src/TensorFlowNET.Keras/Optimizers/OptimizerApi.cs
  10. +0
    -2
      test/TensorFlowNET.Keras.UnitTest/EagerModeTestBase.cs
  11. +0
    -1
      test/TensorFlowNET.Keras.UnitTest/Layers/LayersTest.cs
  12. +2
    -1
      test/TensorFlowNET.Keras.UnitTest/Layers/ModelSaveTest.cs

+ 0
- 2
src/TensorFlowNET.Console/Program.cs View File

@@ -8,8 +8,6 @@ namespace Tensorflow
{
static void Main(string[] args)
{
tf.UseKeras<KerasInterface>();

var diag = new Diagnostician();
// diag.Diagnose(@"D:\memory.txt");



+ 39
- 5
src/TensorFlowNET.Core/Keras/IKerasApi.cs View File

@@ -1,19 +1,24 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using Tensorflow.Framework.Models;
using Tensorflow.Keras.Engine;
using Tensorflow.Keras.Layers;
using Tensorflow.Keras.Losses;
using Tensorflow.Keras.Metrics;
using Tensorflow.Keras.Models;

namespace Tensorflow.Keras
{
public interface IKerasApi
{
public ILayersApi layers { get; }
public ILossesApi losses { get; }
public IMetricsApi metrics { get; }
public IInitializersApi initializers { get; }
IInitializersApi initializers { get; }
ILayersApi layers { get; }
ILossesApi losses { get; }
IOptimizerApi optimizers { get; }
IMetricsApi metrics { get; }
IModelsApi models { get; }

/// <summary>
/// `Model` groups layers into an object with training and inference features.
@@ -21,6 +26,35 @@ namespace Tensorflow.Keras
/// <param name="input"></param>
/// <param name="output"></param>
/// <returns></returns>
public IModel Model(Tensors inputs, Tensors outputs, string name = null);
IModel Model(Tensors inputs, Tensors outputs, string name = null);

/// <summary>
/// Instantiate a Keras tensor.
/// </summary>
/// <param name="shape"></param>
/// <param name="batch_size"></param>
/// <param name="dtype"></param>
/// <param name="name"></param>
/// <param name="sparse">
/// A boolean specifying whether the placeholder to be created is sparse.
/// </param>
/// <param name="ragged">
/// A boolean specifying whether the placeholder to be created is ragged.
/// </param>
/// <param name="tensor">
/// Optional existing tensor to wrap into the `Input` layer.
/// If set, the layer will not create a placeholder tensor.
/// </param>
/// <returns></returns>
Tensors Input(Shape shape = null,
int batch_size = -1,
string name = null,
TF_DataType dtype = TF_DataType.DtInvalid,
bool sparse = false,
Tensor tensor = null,
bool ragged = false,
TypeSpec type_spec = null,
Shape batch_input_shape = null,
Shape batch_shape = null);
}
}

+ 47
- 0
src/TensorFlowNET.Core/Keras/IOptimizerApi.cs View File

@@ -0,0 +1,47 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.Engine;

namespace Tensorflow.Keras
{
/// <summary>
/// Factory surface for creating Keras optimizers (exposed to callers as
/// `keras.optimizers`). Each method returns a configured <see cref="IOptimizer"/>.
/// </summary>
public interface IOptimizerApi
{
    /// <summary>
    /// Adam optimization is a stochastic gradient descent method that is based on
    /// adaptive estimation of first-order and second-order moments.
    /// </summary>
    /// <param name="learning_rate">Step size used for parameter updates.</param>
    /// <param name="beta_1">Exponential decay rate for the first-moment estimates.</param>
    /// <param name="beta_2">Exponential decay rate for the second-moment estimates.</param>
    /// <param name="epsilon">Small constant added for numerical stability.</param>
    /// <param name="amsgrad">Whether to apply the AMSGrad variant of this algorithm.</param>
    /// <param name="name">Name prefix for the operations created by the optimizer.</param>
    /// <returns>A configured Adam optimizer.</returns>
    IOptimizer Adam(float learning_rate = 0.001f,
        float beta_1 = 0.9f,
        float beta_2 = 0.999f,
        float epsilon = 1e-7f,
        bool amsgrad = false,
        string name = "Adam");

    /// <summary>
    /// Construct a new RMSprop optimizer.
    /// </summary>
    /// <param name="learning_rate">Step size used for parameter updates.</param>
    /// <param name="rho">Discounting factor for the moving average of gradients.</param>
    /// <param name="momentum">Momentum factor; 0 disables momentum.</param>
    /// <param name="epsilon">Small constant added for numerical stability.</param>
    /// <param name="centered">
    /// If true, gradients are normalized by their estimated variance
    /// (centered RMSprop) instead of the uncentered second moment.
    /// </param>
    /// <param name="name">Name prefix for the operations created by the optimizer.</param>
    /// <returns>A configured RMSprop optimizer.</returns>
    IOptimizer RMSprop(float learning_rate = 0.001f,
        float rho = 0.9f,
        float momentum = 0.0f,
        float epsilon = 1e-7f,
        bool centered = false,
        string name = "RMSprop");

    /// <summary>
    /// Plain stochastic gradient descent optimizer.
    /// </summary>
    /// <param name="learning_rate">Step size used for parameter updates.</param>
    /// <returns>A configured SGD optimizer.</returns>
    IOptimizer SGD(float learning_rate);
}
}

+ 12
- 0
src/TensorFlowNET.Core/Keras/Models/IModelsApi.cs View File

@@ -0,0 +1,12 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.Engine;

namespace Tensorflow.Keras.Models
{
/// <summary>
/// Public surface for model persistence helpers (exposed to callers as
/// `keras.models`).
/// </summary>
public interface IModelsApi
{
    /// <summary>
    /// Load a model from the given path.
    /// </summary>
    /// <param name="filepath">Location of the saved model on disk.</param>
    /// <param name="compile">Whether to compile the model after loading.</param>
    /// <param name="options">Optional load options; may be null for defaults.</param>
    /// <returns>The restored model.</returns>
    // NOTE: interface members are public by definition; the redundant `public`
    // modifier is dropped for consistency with IKerasApi in this same change set.
    IModel load_model(string filepath, bool compile = true, LoadOptions? options = null);
}
}

+ 0
- 8
src/TensorFlowNET.Core/tensorflow.cs View File

@@ -65,14 +65,6 @@ namespace Tensorflow
InitGradientEnvironment();
}

public void UseKeras<T>() where T : IKerasApi, new()
{
if (keras == null)
{
keras = new T();
}
}

public string VERSION => c_api.StringPiece(c_api.TF_Version());

private void InitGradientEnvironment()


+ 1
- 1
src/TensorFlowNET.Keras/KerasApi.cs View File

@@ -7,6 +7,6 @@ namespace Tensorflow
/// </summary>
public static class KerasApi
{
public static KerasInterface keras { get; } = new KerasInterface();
public static KerasInterface keras { get; } = KerasInterface.Instance;
}
}

+ 24
- 2
src/TensorFlowNET.Keras/KerasInterface.cs View File

@@ -18,6 +18,28 @@ namespace Tensorflow.Keras
{
public class KerasInterface : IKerasApi
{
private static KerasInterface _instance = null;
private static readonly object _lock = new object();
private KerasInterface()
{
Tensorflow.Binding.tf.keras = this;
}

public static KerasInterface Instance
{
get
{
lock (_lock)
{
if (_instance is null)
{
_instance = new KerasInterface();
}
return _instance;
}
}
}

public KerasDataset datasets { get; } = new KerasDataset();
public IInitializersApi initializers { get; } = new InitializersApi();
public Regularizers regularizers { get; } = new Regularizers();
@@ -27,9 +49,9 @@ namespace Tensorflow.Keras
public Preprocessing preprocessing { get; } = new Preprocessing();
ThreadLocal<BackendImpl> _backend = new ThreadLocal<BackendImpl>(() => new BackendImpl());
public BackendImpl backend => _backend.Value;
public OptimizerApi optimizers { get; } = new OptimizerApi();
public IOptimizerApi optimizers { get; } = new OptimizerApi();
public IMetricsApi metrics { get; } = new MetricsApi();
public ModelsApi models { get; } = new ModelsApi();
public IModelsApi models { get; } = new ModelsApi();
public KerasUtils utils { get; } = new KerasUtils();

public Sequential Sequential(List<ILayer> layers = null,


+ 2
- 2
src/TensorFlowNET.Keras/Models/ModelsApi.cs View File

@@ -9,12 +9,12 @@ using ThirdParty.Tensorflow.Python.Keras.Protobuf;

namespace Tensorflow.Keras.Models
{
public class ModelsApi
public class ModelsApi: IModelsApi
{
public Functional from_config(ModelConfig config)
=> Functional.from_config(config);

public Model load_model(string filepath, bool compile = true, LoadOptions? options = null)
public IModel load_model(string filepath, bool compile = true, LoadOptions? options = null)
{
return KerasLoadModelUtils.load_model(filepath, compile: compile, options: options) as Model;
}


+ 5
- 4
src/TensorFlowNET.Keras/Optimizers/OptimizerApi.cs View File

@@ -1,8 +1,9 @@
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;

namespace Tensorflow.Keras.Optimizers
{
public class OptimizerApi
public class OptimizerApi: IOptimizerApi
{
/// <summary>
/// Adam optimization is a stochastic gradient descent method that is based on
@@ -15,7 +16,7 @@ namespace Tensorflow.Keras.Optimizers
/// <param name="amsgrad"></param>
/// <param name="name"></param>
/// <returns></returns>
public OptimizerV2 Adam(float learning_rate = 0.001f,
public IOptimizer Adam(float learning_rate = 0.001f,
float beta_1 = 0.9f,
float beta_2 = 0.999f,
float epsilon = 1e-7f,
@@ -38,7 +39,7 @@ namespace Tensorflow.Keras.Optimizers
/// <param name="centered"></param>
/// <param name="name"></param>
/// <returns></returns>
public OptimizerV2 RMSprop(float learning_rate = 0.001f,
public IOptimizer RMSprop(float learning_rate = 0.001f,
float rho = 0.9f,
float momentum = 0.0f,
float epsilon = 1e-7f,
@@ -54,7 +55,7 @@ namespace Tensorflow.Keras.Optimizers
Name = name
});

public SGD SGD(float learning_rate)
public IOptimizer SGD(float learning_rate)
=> new SGD(learning_rate);
}
}

+ 0
- 2
test/TensorFlowNET.Keras.UnitTest/EagerModeTestBase.cs View File

@@ -10,8 +10,6 @@ namespace TensorFlowNET.Keras.UnitTest
[TestInitialize]
public void TestInit()
{
tf.UseKeras<KerasInterface>();

if (!tf.executing_eagerly())
tf.enable_eager_execution();
tf.Context.ensure_initialized();


+ 0
- 1
test/TensorFlowNET.Keras.UnitTest/Layers/LayersTest.cs View File

@@ -150,7 +150,6 @@ namespace TensorFlowNET.Keras.UnitTest
[TestMethod, Ignore("WIP")]
public void SimpleRNN()
{
tf.UseKeras<KerasInterface>();
var inputs = np.arange(6 * 10 * 8).reshape((6, 10, 8)).astype(np.float32);
/*var simple_rnn = keras.layers.SimpleRNN(4);
var output = simple_rnn.Apply(inputs);


+ 2
- 1
test/TensorFlowNET.Keras.UnitTest/Layers/ModelSaveTest.cs View File

@@ -3,6 +3,7 @@ using Tensorflow.Keras.Engine;
using System.Diagnostics;
using static Tensorflow.KerasApi;
using Tensorflow.Keras.Saving;
using Tensorflow.Keras.Models;

namespace TensorFlowNET.Keras.UnitTest
{
@@ -18,7 +19,7 @@ namespace TensorFlowNET.Keras.UnitTest
var model = GetFunctionalModel();
var config = model.get_config();
Debug.Assert(config is ModelConfig);
var new_model = keras.models.from_config(config as ModelConfig);
var new_model = new ModelsApi().from_config(config as ModelConfig);
Assert.AreEqual(model.Layers.Count, new_model.Layers.Count);
}



Loading…
Cancel
Save