Browse Source

Add model `compile` overloads.

tags/v0.100.5-BERT-load
Haiping Chen 2 years ago
parent
commit
c71745c041
5 changed files with 84 additions and 31 deletions
  1. +7
    -4
      src/TensorFlowNET.Core/Keras/Engine/IModel.cs
  2. +1
    -1
      src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs
  3. +47
    -15
      src/TensorFlowNET.Keras/Engine/Model.Compile.cs
  4. +27
    -10
      src/TensorFlowNET.Keras/Engine/Model.Evaluate.cs
  5. +2
    -1
      src/TensorFlowNET.Keras/Layers/LayersApi.cs

+ 7
- 4
src/TensorFlowNET.Core/Keras/Engine/IModel.cs View File

@@ -1,5 +1,6 @@
using Tensorflow.Functions;
using Tensorflow.Keras.Losses;
using Tensorflow.Keras.Metrics;
using Tensorflow.Keras.Saving;
using Tensorflow.NumPy;

@@ -7,12 +8,14 @@ namespace Tensorflow.Keras.Engine;

public interface IModel : ILayer
{
void compile(IOptimizer optimizer = null,
ILossFunc loss = null,
string[] metrics = null);
void compile(IOptimizer optimizer, ILossFunc loss);
void compile(IOptimizer optimizer, ILossFunc loss, string[] metrics);

void compile(string optimizer, string loss, string[] metrics);

void compile(IOptimizer optimizer, ILossFunc loss, IMetricFunc[] metrics);

ICallback fit(NDArray x, NDArray y,
int batch_size = -1,
int epochs = 1,
@@ -55,7 +58,7 @@ public interface IModel : ILayer
bool skip_mismatch = false,
object options = null);

void evaluate(NDArray x, NDArray y,
Dictionary<string, float> evaluate(NDArray x, NDArray y,
int batch_size = -1,
int verbose = 1,
int steps = -1,


+ 1
- 1
src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs View File

@@ -156,7 +156,7 @@ namespace Tensorflow.Keras.Layers
IInitializer beta_initializer = null,
IInitializer gamma_initializer = null);

public ILayer Normalization(int? axis = -1, float? mean = null, float? variance = null, bool invert = false);
public ILayer Normalization(Shape? input_shape = null, int? axis = -1, float? mean = null, float? variance = null, bool invert = false);
public ILayer LeakyReLU(float alpha = 0.3f);

public ILayer LSTM(int units,


+ 47
- 15
src/TensorFlowNET.Keras/Engine/Model.Compile.cs View File

@@ -10,9 +10,8 @@ namespace Tensorflow.Keras.Engine
LossesContainer compiled_loss;
MetricsContainer compiled_metrics;

public void compile(IOptimizer optimizer = null,
ILossFunc loss = null,
string[] metrics = null)
public void compile(IOptimizer optimizer,
ILossFunc loss)
{
this.optimizer = optimizer ?? new RMSprop(new RMSpropArgs
{
@@ -20,8 +19,8 @@ namespace Tensorflow.Keras.Engine

this.loss = loss ?? new MeanSquaredError();

compiled_loss = new LossesContainer(loss, output_names: output_names);
compiled_metrics = new MetricsContainer(metrics, output_names: output_names);
compiled_loss = new LossesContainer(this.loss, output_names: output_names);
compiled_metrics = new MetricsContainer(new string[0], output_names: output_names);

int experimental_steps_per_execution = 1;
_configure_steps_per_execution(experimental_steps_per_execution);
@@ -31,9 +30,9 @@ namespace Tensorflow.Keras.Engine
_is_compiled = true;
}

public void compile(IOptimizer optimizer = null,
ILossFunc loss = null,
IMetricFunc[] metrics = null)
public void compile(IOptimizer optimizer,
ILossFunc loss,
string[] metrics)
{
this.optimizer = optimizer ?? new RMSprop(new RMSpropArgs
{
@@ -41,7 +40,7 @@ namespace Tensorflow.Keras.Engine

this.loss = loss ?? new MeanSquaredError();

compiled_loss = new LossesContainer(loss, output_names: output_names);
compiled_loss = new LossesContainer(this.loss, output_names: output_names);
compiled_metrics = new MetricsContainer(metrics, output_names: output_names);

int experimental_steps_per_execution = 1;
@@ -52,25 +51,58 @@ namespace Tensorflow.Keras.Engine
_is_compiled = true;
}

public void compile(string optimizer, string loss, string[] metrics)
public void compile(string optimizer,
string loss,
string[] metrics)
{
var _optimizer = optimizer switch
this.optimizer = optimizer switch
{
"rmsprop" => new RMSprop(new RMSpropArgs
{

}),
_ => throw new NotImplementedException("")
_ => new RMSprop(new RMSpropArgs
{
})
};

ILossFunc _loss = loss switch
this.loss = loss switch
{
"mse" => new MeanSquaredError(),
"mae" => new MeanAbsoluteError(),
_ => throw new NotImplementedException("")
_ => new MeanSquaredError()
};

compile(optimizer: _optimizer, loss: _loss, metrics: metrics);
compiled_loss = new LossesContainer(this.loss, output_names: output_names);
compiled_metrics = new MetricsContainer(metrics, output_names: output_names);

int experimental_steps_per_execution = 1;
_configure_steps_per_execution(experimental_steps_per_execution);

// Initialize cache attrs.
_reset_compile_cache();
_is_compiled = true;
}

/// <summary>
/// Configures the model for training using metric-function instances
/// (<see cref="IMetricFunc"/>) rather than metric names.
/// </summary>
/// <param name="optimizer">Optimizer to use; a default RMSprop is substituted when null.</param>
/// <param name="loss">Loss function; MeanSquaredError is substituted when null.</param>
/// <param name="metrics">Metric instances to track during training and evaluation.</param>
public void compile(IOptimizer optimizer,
ILossFunc loss,
IMetricFunc[] metrics)
{
// Fall back to a default-configured RMSprop when no optimizer is supplied.
this.optimizer = optimizer ?? new RMSprop(new RMSpropArgs
{
});

// Default loss matches the other compile overloads in this file.
this.loss = loss ?? new MeanSquaredError();

// Wrap loss and metrics in containers keyed by the model's output names.
compiled_loss = new LossesContainer(this.loss, output_names: output_names);
// NOTE(review): unlike optimizer/loss, `metrics` has no null fallback here —
// confirm MetricsContainer tolerates a null metrics array.
compiled_metrics = new MetricsContainer(metrics, output_names: output_names);

int experimental_steps_per_execution = 1;
_configure_steps_per_execution(experimental_steps_per_execution);

// Initialize cache attrs.
_reset_compile_cache();
// Mark the model as compiled so fit/evaluate can proceed.
_is_compiled = true;
}
}
}

+ 27
- 10
src/TensorFlowNET.Keras/Engine/Model.Evaluate.cs View File

@@ -26,7 +26,7 @@ namespace Tensorflow.Keras.Engine
/// <param name="workers"></param>
/// <param name="use_multiprocessing"></param>
/// <param name="return_dict"></param>
public void evaluate(NDArray x, NDArray y,
public Dictionary<string, float> evaluate(NDArray x, NDArray y,
int batch_size = -1,
int verbose = 1,
int steps = -1,
@@ -63,12 +63,12 @@ namespace Tensorflow.Keras.Engine
});
callbacks.on_test_begin();

IEnumerable<(string, Tensor)> logs = null;
foreach (var (epoch, iterator) in data_handler.enumerate_epochs())
{
reset_metrics();
//callbacks.on_epoch_begin(epoch);
callbacks.on_epoch_begin(epoch);
// data_handler.catch_stop_iteration();
IEnumerable<(string, Tensor)> logs = null;

foreach (var step in data_handler.steps())
{
@@ -78,12 +78,16 @@ namespace Tensorflow.Keras.Engine
callbacks.on_test_batch_end(end_step, logs);
}
}
Console.WriteLine();
GC.Collect();
GC.WaitForPendingFinalizers();

var results = new Dictionary<string, float>();
foreach (var log in logs)
{
results[log.Item1] = (float)log.Item2;
}
return results;
}

public KeyValuePair<string, float>[] evaluate(IDatasetV2 x)
public Dictionary<string, float> evaluate(IDatasetV2 x, int verbose = 1)
{
var data_handler = new DataHandler(new DataHandlerArgs
{
@@ -92,21 +96,34 @@ namespace Tensorflow.Keras.Engine
StepsPerExecution = _steps_per_execution
});

var callbacks = new CallbackList(new CallbackParams
{
Model = this,
Verbose = verbose,
Steps = data_handler.Inferredsteps
});
callbacks.on_test_begin();

IEnumerable<(string, Tensor)> logs = null;
foreach (var (epoch, iterator) in data_handler.enumerate_epochs())
{
reset_metrics();
// callbacks.on_epoch_begin(epoch)
callbacks.on_epoch_begin(epoch);
// data_handler.catch_stop_iteration();


foreach (var step in data_handler.steps())
{
// callbacks.on_train_batch_begin(step)
logs = test_function(data_handler, iterator);
}
}
return logs.Select(x => new KeyValuePair<string, float>(x.Item1, (float)x.Item2)).ToArray();

var results = new Dictionary<string, float>();
foreach (var log in logs)
{
results[log.Item1] = (float)log.Item2;
}
return results;
}

IEnumerable<(string, Tensor)> test_function(DataHandler data_handler, OwnedIterator iterator)


+ 2
- 1
src/TensorFlowNET.Keras/Layers/LayersApi.cs View File

@@ -873,9 +873,10 @@ namespace Tensorflow.Keras.Layers
CountWeights = count_weights
});

public ILayer Normalization(int? axis = -1, float? mean = null, float? variance = null, bool invert = false)
public ILayer Normalization(Shape? input_shape = null, int? axis = -1, float? mean = null, float? variance = null, bool invert = false)
=> new Normalization(new NormalizationArgs
{
InputShape = input_shape,
Axis = axis,
Mean = mean,
Variance = variance,


Loading…
Cancel
Save