From 006eeaa454fe574e3f55cced2319260a3c67b0fc Mon Sep 17 00:00:00 2001
From: Oceania2018
Date: Sun, 25 Oct 2020 18:44:11 -0500
Subject: [PATCH] Add Metrics architecture.

---
 src/TensorFlowNET.Core/Binding.Util.cs              | 16 ++++--
 .../Keras/ArgsDefinition/OptimizerV2Args.cs         | 15 +++++
 .../Keras/ArgsDefinition/RMSpropArgs.cs             |  4 +-
 .../Keras/Engine/Container.cs                       | 17 ++++++
 .../Keras/Engine/Functional.cs                      | 28 ++++++++++
 .../Keras/Engine/Layer.FlattenLayers.cs             | 29 ++++++++++
 .../Keras/Engine/Layer.Layers.cs                    |  2 +-
 .../Keras/Engine/Layer.State.cs                     | 16 ++++--
 .../Keras/Engine/LossesContainer.cs                 | 25 +++++++++
 .../Keras/Engine/MetricsContainer.cs                | 20 +++++++
 src/TensorFlowNET.Core/Keras/Engine/Model.cs        | 55 ++++++++++++++++++-
 src/TensorFlowNET.Core/Keras/Metrics/Mean.cs        | 19 +++++++
 .../Keras/Metrics/Metric.cs                         | 50 +++++++++++++++++
 .../Keras/Metrics/Reduce.cs                         | 28 ++++++++++
 src/TensorFlowNET.Core/Keras/Metrics/Sum.cs         | 10 ++++
 .../Keras/Optimizers/Adam.cs                        |  3 +-
 .../Keras/Optimizers/OptimizerV2.cs                 |  8 ++-
 .../Keras/Optimizers/RMSprop.cs                     |  4 +-
 .../Keras/Optimizers/SGD.cs                         |  3 +-
 .../Operations/Losses/Reduction.cs                  |  4 +-
 .../Operations/Losses/losses_impl.py.cs             |  2 +-
 .../Tensorflow.Binding.csproj                       |  6 +-
 22 files changed, 339 insertions(+), 25 deletions(-)
 create mode 100644 src/TensorFlowNET.Core/Keras/ArgsDefinition/OptimizerV2Args.cs
 create mode 100644 src/TensorFlowNET.Core/Keras/Engine/Container.cs
 create mode 100644 src/TensorFlowNET.Core/Keras/Engine/Layer.FlattenLayers.cs
 create mode 100644 src/TensorFlowNET.Core/Keras/Engine/LossesContainer.cs
 create mode 100644 src/TensorFlowNET.Core/Keras/Engine/MetricsContainer.cs
 create mode 100644 src/TensorFlowNET.Core/Keras/Metrics/Mean.cs
 create mode 100644 src/TensorFlowNET.Core/Keras/Metrics/Metric.cs
 create mode 100644 src/TensorFlowNET.Core/Keras/Metrics/Reduce.cs
 create mode 100644 src/TensorFlowNET.Core/Keras/Metrics/Sum.cs

diff --git a/src/TensorFlowNET.Core/Binding.Util.cs b/src/TensorFlowNET.Core/Binding.Util.cs
index d8732224..3dcd1ad7 100644
--- a/src/TensorFlowNET.Core/Binding.Util.cs
+++ b/src/TensorFlowNET.Core/Binding.Util.cs
@@ -424,24 +424,30 @@ namespace Tensorflow
             return true;
         }
 
+        public static void extendleft<T>(this Queue<T> queue, IEnumerable<T> elements)
+        {
+            foreach (var element in elements.Reverse())
+                queue.Enqueue(element);
+        }
+
         public static bool empty<T>(this Queue<T> queue)
             => queue.Count == 0;
 
-        public static TValue SetDefault<TKey, TValue>(this Dictionary<TKey, TValue> dic, TKey key, TValue value)
+        public static TValue SetDefault<TKey, TValue>(this Dictionary<TKey, TValue> dic, TKey key, TValue defaultValue)
         {
             if (dic.ContainsKey(key))
                 return dic[key];
 
-            dic[key] = value;
-            return value;
+            dic[key] = defaultValue;
+            return defaultValue;
         }
 
-        public static TValue Get<TKey, TValue>(this Dictionary<TKey, TValue> dic, TKey key, TValue value)
+        public static TValue Get<TKey, TValue>(this Dictionary<TKey, TValue> dic, TKey key, TValue defaultValue)
        {
             if (dic.ContainsKey(key))
                 return dic[key];
 
-            return value;
+            return defaultValue;
         }
     }
 }
diff --git a/src/TensorFlowNET.Core/Keras/ArgsDefinition/OptimizerV2Args.cs b/src/TensorFlowNET.Core/Keras/ArgsDefinition/OptimizerV2Args.cs
new file mode 100644
index 00000000..c138f5d1
--- /dev/null
+++ b/src/TensorFlowNET.Core/Keras/ArgsDefinition/OptimizerV2Args.cs
@@ -0,0 +1,15 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Tensorflow.Keras.ArgsDefinition
+{
+    public class OptimizerV2Args
+    {
+        public string Name { get; set; }
+        public float LearningRate { get; set; } = 0.001f;
+        public float InitialDecay { get; set; }
+        public float ClipNorm { get; set; }
+        public float ClipValue { get; set; }
+    }
+}
diff --git a/src/TensorFlowNET.Core/Keras/ArgsDefinition/RMSpropArgs.cs b/src/TensorFlowNET.Core/Keras/ArgsDefinition/RMSpropArgs.cs
index 42a5bcb1..cca1bf16 100644
--- a/src/TensorFlowNET.Core/Keras/ArgsDefinition/RMSpropArgs.cs
+++ b/src/TensorFlowNET.Core/Keras/ArgsDefinition/RMSpropArgs.cs
@@ -4,13 +4,11 @@ using System.Text;
 
 namespace Tensorflow.Keras.ArgsDefinition
 {
-    public class RMSpropArgs
+    public class RMSpropArgs : OptimizerV2Args
     {
-        public float LearningRate { get; set; } = 0.001f;
         public float RHO { get; set; } = 0.9f;
         public float Momentum { get; set; } = 0.0f;
         public float Epsilon { get; set; } = 1e-7f;
         public bool Centered { get; set; } = false;
-        public string Name { get; set; } = "RMSprop";
     }
 }
diff --git a/src/TensorFlowNET.Core/Keras/Engine/Container.cs b/src/TensorFlowNET.Core/Keras/Engine/Container.cs
new file mode 100644
index 00000000..f8e6c0f4
--- /dev/null
+++ b/src/TensorFlowNET.Core/Keras/Engine/Container.cs
@@ -0,0 +1,17 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Tensorflow.Keras.Engine
+{
+    public class Container
+    {
+        protected string[] _output_names;
+        protected bool _built;
+
+        public Container(string[] output_names)
+        {
+            _output_names = output_names;
+        }
+    }
+}
diff --git a/src/TensorFlowNET.Core/Keras/Engine/Functional.cs b/src/TensorFlowNET.Core/Keras/Engine/Functional.cs
index c88fea71..0a1c9464 100644
--- a/src/TensorFlowNET.Core/Keras/Engine/Functional.cs
+++ b/src/TensorFlowNET.Core/Keras/Engine/Functional.cs
@@ -96,9 +96,37 @@ namespace Tensorflow.Keras.Engine
             NodesByDepth = nodes_by_depth;
             _layers = layers;
 
+            // Build self.input_names and self.output_names.
+            _set_output_names();
+
             ComputeTensorUsageCount();
         }
 
+        /// <summary>
+        /// Assigns unique names to the Network's outputs.
+        /// </summary>
+        void _set_output_names()
+        {
+            var uniquified = new List<string>();
+            var output_names = new List<string>();
+            var prefix_count = new Dictionary<string, int>();
+
+            foreach (var layer in _output_layers)
+            {
+                var proposal = layer.Name;
+                while (output_names.Contains(proposal))
+                {
+                    var existing_count = prefix_count.Get(layer.Name, 1);
+                    proposal = $"{layer.Name}_{existing_count}";
+                    prefix_count[layer.Name] = existing_count + 1;
+                }
+                output_names.add(proposal);
+                uniquified.append(proposal);
+            }
+
+            this.output_names = uniquified.ToArray();
+        }
+
         void ComputeTensorUsageCount()
         {
             var available_tensors = inputs.Select(x => x.GetHashCode()).ToList();
diff --git a/src/TensorFlowNET.Core/Keras/Engine/Layer.FlattenLayers.cs b/src/TensorFlowNET.Core/Keras/Engine/Layer.FlattenLayers.cs
new file mode 100644
index 00000000..48720e6f
--- /dev/null
+++ b/src/TensorFlowNET.Core/Keras/Engine/Layer.FlattenLayers.cs
@@ -0,0 +1,29 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Tensorflow.Keras.Engine
+{
+    public partial class Layer
+    {
+        public IEnumerable<Layer> _flatten_layers(bool recursive = true, bool include_self = true)
+        {
+            if (include_self)
+                yield return this;
+
+            var seen_object_ids = new List<int>();
+            var deque = new Queue<Layer>(_layers);
+            while (!deque.empty())
+            {
+                var layer_or_container = deque.Dequeue();
+                var layer_or_container_id = layer_or_container.GetHashCode();
+                if (seen_object_ids.Contains(layer_or_container_id))
+                    continue;
+                seen_object_ids.Add(layer_or_container_id);
+                yield return layer_or_container;
+                if (recursive)
+                    deque.extendleft(layer_or_container._layers);
+            }
+        }
+    }
+}
diff --git a/src/TensorFlowNET.Core/Keras/Engine/Layer.Layers.cs b/src/TensorFlowNET.Core/Keras/Engine/Layer.Layers.cs
index cccb605b..6c13e56a 100644
--- a/src/TensorFlowNET.Core/Keras/Engine/Layer.Layers.cs
+++ b/src/TensorFlowNET.Core/Keras/Engine/Layer.Layers.cs
@@ -12,7 +12,7 @@ namespace Tensorflow.Keras.Engine
     {
         protected List<Layer> _layers = new List<Layer>();
         public List<Layer> Layers => _layers;
-        
+
         protected Layer Dense(int units,
             Activation activation = null,
             TensorShape input_shape = null)
diff --git a/src/TensorFlowNET.Core/Keras/Engine/Layer.State.cs b/src/TensorFlowNET.Core/Keras/Engine/Layer.State.cs
index bb2036a5..947767e6 100644
--- a/src/TensorFlowNET.Core/Keras/Engine/Layer.State.cs
+++ b/src/TensorFlowNET.Core/Keras/Engine/Layer.State.cs
@@ -6,11 +6,19 @@ namespace Tensorflow.Keras.Engine
 {
     public partial class Layer
     {
-        Dictionary<Layer, bool> trainable_state;
-        Dictionary<Layer, bool> _get_trainable_state()
+        protected Dictionary<Layer, bool> trainable_state;
+        protected Dictionary<Layer, bool> _compiled_trainable_state;
+
+        /// <summary>
+        /// Get the `trainable` state of each sublayer.
+        /// </summary>
+        /// <returns></returns>
+        protected Dictionary<Layer, bool> _get_trainable_state()
         {
-            trainable_state = new Dictionary<Layer, bool>();
-            throw new NotImplementedException("");
+            trainable_state = new Dictionary<Layer, bool>();
+            foreach (var layer in _flatten_layers())
+                trainable_state[layer] = layer.Trainable;
+            return trainable_state;
         }
 
         void _set_trainable_state(Dictionary<Layer, bool> trainable_state)
diff --git a/src/TensorFlowNET.Core/Keras/Engine/LossesContainer.cs b/src/TensorFlowNET.Core/Keras/Engine/LossesContainer.cs
new file mode 100644
index 00000000..9ee4059d
--- /dev/null
+++ b/src/TensorFlowNET.Core/Keras/Engine/LossesContainer.cs
@@ -0,0 +1,25 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Losses;
+using Tensorflow.Keras.Metrics;
+
+namespace Tensorflow.Keras.Engine
+{
+    public class LossesContainer : Container
+    {
+        ILossFunc _user_losses;
+        ILossFunc _losses;
+        Mean _loss_metric;
+
+        public LossesContainer(ILossFunc losses, string[] output_names = null)
+            : base(output_names)
+        {
+            _user_losses = losses;
+            _losses = losses;
+            _loss_metric = new Mean(name: "loss");
+            _built = false;
+        }
+    }
+}
diff --git a/src/TensorFlowNET.Core/Keras/Engine/MetricsContainer.cs b/src/TensorFlowNET.Core/Keras/Engine/MetricsContainer.cs
new file mode 100644
index 00000000..c494ec6f
--- /dev/null
+++ b/src/TensorFlowNET.Core/Keras/Engine/MetricsContainer.cs
@@ -0,0 +1,20 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Tensorflow.Keras.Engine
+{
+    public class MetricsContainer : Container
+    {
+        string[] _user_metrics;
+        string[] _metrics;
+
+        public MetricsContainer(string[] metrics, string[] output_names = null)
+            : base(output_names)
+        {
+            _user_metrics = metrics;
+            _metrics = metrics;
+            _built = false;
+        }
+    }
+}
diff --git a/src/TensorFlowNET.Core/Keras/Engine/Model.cs b/src/TensorFlowNET.Core/Keras/Engine/Model.cs
index 2b78aa2f..c57f2d83 100644
--- a/src/TensorFlowNET.Core/Keras/Engine/Model.cs
+++ b/src/TensorFlowNET.Core/Keras/Engine/Model.cs
@@ -4,6 +4,7 @@ using Tensorflow.Keras.ArgsDefinition;
 using Tensorflow.Keras.Engine.DataAdapters;
 using Tensorflow.Keras.Losses;
 using Tensorflow.Keras.Optimizers;
+using NumSharp;
 
 namespace Tensorflow.Keras.Engine
 {
@@ -20,12 +21,17 @@ namespace Tensorflow.Keras.Engine
         bool _is_compiled;
 #pragma warning restore CS0414 // The field 'Model._is_compiled' is assigned but its value is never used
 #pragma warning restore CS0108 // Member hides inherited member; missing new keyword
-        string loss;
+        ILossFunc loss;
         IOptimizer optimizer;
         IVariableV1 _steps_per_execution;
         protected bool _is_graph_network;
         protected Tensors inputs;
         protected Tensors outputs;
+        public string[] output_names;
+        IVariableV1 _train_counter;
+        IVariableV1 _test_counter;
+        IVariableV1 _predict_counter;
+        bool _base_model_initialized;
 
         public Model(ModelArgs args)
             : base(args)
@@ -35,7 +41,17 @@ namespace Tensorflow.Keras.Engine
 
         public void compile(ILossFunc loss, OptimizerV2 optimizer, string[] metrics)
         {
+            this.optimizer = optimizer;
+            var compiled_loss = new LossesContainer(loss, output_names: output_names);
+            var compiled_metrics = new MetricsContainer(metrics, output_names: output_names);
 
+            int experimental_steps_per_execution = 1;
+            _configure_steps_per_execution(experimental_steps_per_execution);
+
+            // Initialize cache attrs.
+            _reset_compile_cache();
+            _is_compiled = true;
+            this.loss = loss;
         }
 
         public void compile(string optimizerName, string lossName)
@@ -55,10 +71,29 @@ namespace Tensorflow.Keras.Engine
 
             _reset_compile_cache();
 
-            loss = lossName;
             _is_compiled = true;
         }
 
+        /// <summary>
+        /// Trains the model for a fixed number of epochs (iterations on a dataset).
+        /// </summary>
+        /// <param name="x"></param>
+        /// <param name="y"></param>
+        /// <param name="batch_size"></param>
+        /// <param name="epochs"></param>
+        /// <param name="verbose"></param>
+        /// <param name="validation_split"></param>
+        /// <param name="shuffle"></param>
+        public void fit(NDArray x, NDArray y,
+            int batch_size = -1,
+            int epochs = 1,
+            int verbose = 1,
+            float validation_split = 0f,
+            bool shuffle = true)
+        {
+
+        }
+
         void _configure_steps_per_execution(int steps_per_execution)
         {
             _steps_per_execution = tf.Variable(steps_per_execution,
@@ -68,7 +103,23 @@ namespace Tensorflow.Keras.Engine
 
         void _reset_compile_cache()
         {
+            // Used to cache `trainable` attr of `Layer`s for `fit`.
+            _compiled_trainable_state = _get_trainable_state();
+        }
+
+        void _init_batch_counters()
+        {
+            _train_counter = tf.Variable(0,
+                dtype: TF_DataType.TF_INT64,
+                aggregation: VariableAggregation.OnlyFirstReplica);
 
+            _test_counter = tf.Variable(0,
+                dtype: TF_DataType.TF_INT64,
+                aggregation: VariableAggregation.OnlyFirstReplica);
+
+            _predict_counter = tf.Variable(0,
+                dtype: TF_DataType.TF_INT64,
+                aggregation: VariableAggregation.OnlyFirstReplica);
         }
 
         public void compile(string optimizerName, ILossFunc lossName)
diff --git a/src/TensorFlowNET.Core/Keras/Metrics/Mean.cs b/src/TensorFlowNET.Core/Keras/Metrics/Mean.cs
new file mode 100644
index 00000000..b1f9c419
--- /dev/null
+++ b/src/TensorFlowNET.Core/Keras/Metrics/Mean.cs
@@ -0,0 +1,19 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+
+namespace Tensorflow.Keras.Metrics
+{
+    /// <summary>
+    /// Computes the (weighted) mean of the given values.
+    /// </summary>
+    public class Mean : Reduce
+    {
+        public Mean(string name = "mean", TF_DataType dtype = TF_DataType.DtInvalid)
+            : base(Reduction.WEIGHTED_MEAN, name, dtype: dtype)
+        {
+
+        }
+    }
+}
diff --git a/src/TensorFlowNET.Core/Keras/Metrics/Metric.cs b/src/TensorFlowNET.Core/Keras/Metrics/Metric.cs
new file mode 100644
index 00000000..f5ae28bd
--- /dev/null
+++ b/src/TensorFlowNET.Core/Keras/Metrics/Metric.cs
@@ -0,0 +1,50 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Metrics
+{
+    /// <summary>
+    /// Encapsulates metric logic and state.
+    /// </summary>
+    public class Metric : Layer
+    {
+        public Metric(string name = null, TF_DataType dtype = TF_DataType.DtInvalid)
+            : base(new LayerArgs
+            {
+                Name = name,
+                DType = dtype
+            })
+        {
+            stateful = true;
+            built = true;
+        }
+
+        protected override IVariableV1 add_weight(string name,
+            TensorShape shape = null,
+            TF_DataType dtype = TF_DataType.TF_FLOAT,
+            IInitializer initializer = null,
+            IRegularizer regularizer = null,
+            VariableSynchronization synchronization = VariableSynchronization.OnRead,
+            VariableAggregation aggregation = VariableAggregation.Sum,
+            bool trainable = true,
+            Func<VariableArgs, IVariableV1> getter = null)
+        {
+            if (shape == null)
+                shape = new TensorShape(new int[0]);
+
+            return tf_with(ops.init_scope(), delegate
+            {
+                return base.add_weight(name, shape,
+                    dtype: dtype,
+                    trainable: false,
+                    initializer: initializer,
+                    synchronization: synchronization,
+                    aggregation: aggregation);
+            });
+        }
+    }
+}
diff --git a/src/TensorFlowNET.Core/Keras/Metrics/Reduce.cs b/src/TensorFlowNET.Core/Keras/Metrics/Reduce.cs
new file mode 100644
index 00000000..79613b2d
--- /dev/null
+++ b/src/TensorFlowNET.Core/Keras/Metrics/Reduce.cs
@@ -0,0 +1,28 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Metrics
+{
+    /// <summary>
+    /// Encapsulates metrics that perform a reduce operation on the values.
+    /// </summary>
+    public class Reduce : Metric
+    {
+        IVariableV1 total;
+        IVariableV1 count;
+        public Reduce(string reduction, string name, TF_DataType dtype = TF_DataType.DtInvalid)
+            : base(name: name, dtype: dtype)
+        {
+            total = add_weight("total", initializer: tf.zeros_initializer);
+
+            if (reduction == Reduction.WEIGHTED_MEAN ||
+                reduction == Reduction.SUM_OVER_BATCH_SIZE)
+            {
+                count = add_weight("count", initializer: tf.zeros_initializer);
+            }
+        }
+    }
+}
diff --git a/src/TensorFlowNET.Core/Keras/Metrics/Sum.cs b/src/TensorFlowNET.Core/Keras/Metrics/Sum.cs
new file mode 100644
index 00000000..10396867
--- /dev/null
+++ b/src/TensorFlowNET.Core/Keras/Metrics/Sum.cs
@@ -0,0 +1,10 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Tensorflow.Keras.Metrics
+{
+    class Sum
+    {
+    }
+}
diff --git a/src/TensorFlowNET.Core/Keras/Optimizers/Adam.cs b/src/TensorFlowNET.Core/Keras/Optimizers/Adam.cs
index bd5c3a96..a4df550b 100644
--- a/src/TensorFlowNET.Core/Keras/Optimizers/Adam.cs
+++ b/src/TensorFlowNET.Core/Keras/Optimizers/Adam.cs
@@ -3,6 +3,7 @@ using System.Collections.Generic;
 using System.Linq;
 using System.Text;
 using Tensorflow.Eager;
+using Tensorflow.Keras.ArgsDefinition;
 
 namespace Tensorflow.Keras.Optimizers
 {
@@ -22,7 +23,7 @@ namespace Tensorflow.Keras.Optimizers
             float beta_2 = 0.999f,
             float epsilon = 1e-7f,
             bool amsgrad = false,
-            string name = "Adam")
+            string name = "Adam") : base(new OptimizerV2Args { })
         {
             _set_hyper("learning_rate", learning_rate);
             // _set_hyper("decay", _initial_decay);
diff --git a/src/TensorFlowNET.Core/Keras/Optimizers/OptimizerV2.cs b/src/TensorFlowNET.Core/Keras/Optimizers/OptimizerV2.cs
index 4f5d2545..54f217e3 100644
--- a/src/TensorFlowNET.Core/Keras/Optimizers/OptimizerV2.cs
+++ b/src/TensorFlowNET.Core/Keras/Optimizers/OptimizerV2.cs
@@ -7,6 +7,7 @@ using Tensorflow.Train;
 using static Tensorflow.Binding;
 using Tensorflow;
 using Tensorflow.Eager;
+using Tensorflow.Keras.ArgsDefinition;
 
 namespace Tensorflow.Keras.Optimizers
 {
@@ -15,6 +16,7 @@ namespace Tensorflow.Keras.Optimizers
     /// </summary>
     public class OptimizerV2 : Trackable, IOptimizer
     {
+        OptimizerV2Args args;
         protected bool _hypers_created;
         protected virtual string _name { get; }
 
@@ -30,13 +32,17 @@ namespace Tensorflow.Keras.Optimizers
         Dictionary<string, Dictionary<string, IVariableV1>> _slots;
         List<string> _slot_names;
 
-        public OptimizerV2() : base()
+        public OptimizerV2(OptimizerV2Args args) : base()
         {
+            this.args = args;
             _weights = new List<IVariableV1>();
             _hyper = new Dictionary<string, float>();
             _hyper_variables = new Dictionary<string, IVariableV1>();
             _slots = new Dictionary<string, Dictionary<string, IVariableV1>>();
             _slot_names = new List<string>();
+
+            _set_hyper("learning_rate", args.LearningRate);
+            _set_hyper("decay", args.InitialDecay);
         }
 
         public void apply_gradients((Tensor, ResourceVariable) grads_and_vars,
diff --git a/src/TensorFlowNET.Core/Keras/Optimizers/RMSprop.cs b/src/TensorFlowNET.Core/Keras/Optimizers/RMSprop.cs
index 8a08282f..4bd43873 100644
--- a/src/TensorFlowNET.Core/Keras/Optimizers/RMSprop.cs
+++ b/src/TensorFlowNET.Core/Keras/Optimizers/RMSprop.cs
@@ -12,9 +12,11 @@ namespace Tensorflow.Keras.Optimizers
     {
         RMSpropArgs args;
 
-        public RMSprop(RMSpropArgs args)
+        public RMSprop(RMSpropArgs args) : base(args)
         {
             this.args = args;
+            _set_hyper("rho", args.RHO);
+            _set_hyper("momentum", args.Momentum);
         }
     }
 }
diff --git a/src/TensorFlowNET.Core/Keras/Optimizers/SGD.cs b/src/TensorFlowNET.Core/Keras/Optimizers/SGD.cs
index 8ac1aa5c..d207407f 100644
--- a/src/TensorFlowNET.Core/Keras/Optimizers/SGD.cs
+++ b/src/TensorFlowNET.Core/Keras/Optimizers/SGD.cs
@@ -3,6 +3,7 @@ using System.Collections.Generic;
 using System.Linq;
 using System.Text;
 using Tensorflow.Eager;
+using Tensorflow.Keras.ArgsDefinition;
 
 namespace Tensorflow.Keras.Optimizers
 {
@@ -17,7 +18,7 @@ namespace Tensorflow.Keras.Optimizers
         public SGD(float learning_rate,
             float momentum = 0.0f,
             bool nesterov = false,
-            float decay = 0.0f) : base()
+            float decay = 0.0f) : base(new OptimizerV2Args { })
         {
             _set_hyper("learning_rate", learning_rate);
             _set_hyper("decay", decay);
diff --git a/src/TensorFlowNET.Core/Operations/Losses/Reduction.cs b/src/TensorFlowNET.Core/Operations/Losses/Reduction.cs
index 1531848c..0a93ae92 100644
--- a/src/TensorFlowNET.Core/Operations/Losses/Reduction.cs
+++ b/src/TensorFlowNET.Core/Operations/Losses/Reduction.cs
@@ -3,9 +3,9 @@
     public class Reduction
     {
         public const string NONE = "none";
-        public const string SUM = "weighted_sum";
+        public const string WEIGHTED_SUM = "weighted_sum";
         public const string SUM_OVER_BATCH_SIZE = "weighted_sum_over_batch_size";
-        public const string MEAN = "weighted_mean";
+        public const string WEIGHTED_MEAN = "weighted_mean";
         public const string SUM_BY_NONZERO_WEIGHTS = "weighted_sum_by_nonzero_weights";
         public const string SUM_OVER_NONZERO_WEIGHTS = SUM_BY_NONZERO_WEIGHTS;
     }
 }
diff --git a/src/TensorFlowNET.Core/Operations/Losses/losses_impl.py.cs b/src/TensorFlowNET.Core/Operations/Losses/losses_impl.py.cs
index 1f4ce2d8..783a20da 100644
--- a/src/TensorFlowNET.Core/Operations/Losses/losses_impl.py.cs
+++ b/src/TensorFlowNET.Core/Operations/Losses/losses_impl.py.cs
@@ -47,7 +47,7 @@ namespace Tensorflow
             else
             {
                 loss = math_ops.reduce_sum(weighted_losses);
-                if (reduction == Reduction.MEAN)
+                if (reduction == Reduction.WEIGHTED_MEAN)
                     loss = _safe_mean(
                         loss, math_ops.reduce_sum(array_ops.ones_like(losses) * weights));
                 else if (reduction == Reduction.SUM_BY_NONZERO_WEIGHTS ||
diff --git a/src/TensorFlowNET.Core/Tensorflow.Binding.csproj b/src/TensorFlowNET.Core/Tensorflow.Binding.csproj
index 4530830d..76e80fe5 100644
--- a/src/TensorFlowNET.Core/Tensorflow.Binding.csproj
+++ b/src/TensorFlowNET.Core/Tensorflow.Binding.csproj
@@ -5,7 +5,7 @@
     <AssemblyName>TensorFlow.NET</AssemblyName>
     <RootNamespace>Tensorflow</RootNamespace>
     <TargetTensorFlow>2.2.0</TargetTensorFlow>
-    <Version>0.21.0</Version>
+    <Version>0.30.0</Version>
     <LangVersion>8.0</LangVersion>
     <Authors>Haiping Chen, Meinrad Recheis, Eli Belash</Authors>
     <Company>SciSharp STACK</Company>
@@ -19,14 +19,14 @@
     <Description>Google's TensorFlow full binding in .NET Standard.
 Building, training and infering deep learning models.
 https://tensorflownet.readthedocs.io</Description>
-    <AssemblyVersion>0.21.0.0</AssemblyVersion>
+    <AssemblyVersion>0.30.0.0</AssemblyVersion>
     <PackageReleaseNotes>tf.net 0.20.x and above are based on tensorflow native 2.x.
 * Eager Mode is added finally.
 * tf.keras is partially working.
 * tf.data is added.
 * autograph works partially.</PackageReleaseNotes>
-    <FileVersion>0.21.0.0</FileVersion>
+    <FileVersion>0.30.0.0</FileVersion>
     <PackageLicenseFile>LICENSE</PackageLicenseFile>
     <PackageRequireLicenseAcceptance>true</PackageRequireLicenseAcceptance>
     <SignAssembly>true</SignAssembly>
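Usage note (editor's addition, not part of the patch): a minimal sketch of how the pieces introduced above fit together, assuming only the types touched by this patch (ILossFunc, Model, the new optimizer args and metric classes). The class and method names MetricsArchitectureSketch and Example are hypothetical, and Metric.update_state/result are not implemented here, so only construction and compile() are shown.

using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using Tensorflow.Keras.Losses;
using Tensorflow.Keras.Metrics;
using Tensorflow.Keras.Optimizers;

// Hypothetical usage sketch (not part of the patch).
class MetricsArchitectureSketch
{
    static void Example(Model model, ILossFunc loss)
    {
        // RMSpropArgs now derives from OptimizerV2Args, so the base OptimizerV2
        // constructor registers the learning_rate/decay hypers from the args object.
        var optimizer = new RMSprop(new RMSpropArgs
        {
            LearningRate = 0.001f,
            RHO = 0.9f
        });

        // compile() builds a LossesContainer (which owns a Mean("loss") metric)
        // and a MetricsContainer, then caches each layer's trainable state
        // via _reset_compile_cache()/_get_trainable_state().
        model.compile(loss, optimizer, new[] { "accuracy" });

        // Metrics are Layers: Reduce-based metrics such as Mean allocate their
        // "total" (and, for weighted means, "count") variables through add_weight.
        var mean_metric = new Mean(name: "mean_loss");
    }
}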