diff --git a/docs/source/Train.md b/docs/source/Train.md
new file mode 100644
index 00000000..c71b31c7
--- /dev/null
+++ b/docs/source/Train.md
@@ -0,0 +1,5 @@
+# Chapter. Trainer
+
+### Saver
+
+The `tf.train.Saver` class provides methods to save and restore models.
\ No newline at end of file
diff --git a/docs/source/index.rst b/docs/source/index.rst
index b0962402..362240de 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -26,4 +26,5 @@ Welcome to TensorFlow.NET's documentation!
    NameScope
    ControlDependency
    Gradient
+   Train
    EagerMode
\ No newline at end of file
diff --git a/test/TensorFlowNET.Examples/LinearRegression.cs b/test/TensorFlowNET.Examples/LinearRegression.cs
index a692cd9f..d9fb7702 100644
--- a/test/TensorFlowNET.Examples/LinearRegression.cs
+++ b/test/TensorFlowNET.Examples/LinearRegression.cs
@@ -61,15 +61,32 @@ namespace TensorFlowNET.Examples
                 sess.run(init);
 
                 // Fit all training data
-                for (int i = 0; i < training_epochs; i++)
+                for (int epoch = 0; epoch < training_epochs; epoch++)
                 {
-                    foreach(var (x, y) in Python.zip(train_X, train_Y))
+                    foreach (var (x, y) in Python.zip(train_X, train_Y))
                     {
-                        var feed_dict = new Dictionary();
+                        sess.run(optimizer, feed_dict: new FeedItem[]
+                        {
+                            new FeedItem(X, x),
+                            new FeedItem(Y, y)
+                        });
+                    }
+
+                    // Display logs per epoch step
+                    if ((epoch + 1) % display_step == 0)
+                    {
+                        var c = sess.run(cost, feed_dict: new FeedItem[]
+                        {
+                            new FeedItem(X, train_X),
+                            new FeedItem(Y, train_Y)
+                        });
 
-                        // sess.run(optimizer, feed_dict);
+                        Console.WriteLine($"Epoch: {epoch + 1} cost={c} " +
+                            $"W={sess.run(W)} b={sess.run(b)}");
                     }
                 }
+
+                Console.WriteLine("Optimization Finished!");
             });
         }
     }
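
For the new Saver chapter, a short usage sketch may help readers connect it to the LinearRegression example above. This is a hypothetical snippet, not code from this change: it assumes TensorFlow.NET mirrors the Python-style `tf.train.Saver()`, `saver.save()`, and `saver.restore()` calls, and the class name `SaverSketch` and the checkpoint path are illustrative only.

```csharp
using System;
using Tensorflow;

namespace TensorFlowNET.Examples
{
    public class SaverSketch
    {
        public static void Run()
        {
            // Two trainable variables to checkpoint, as in LinearRegression.
            var W = tf.Variable(0.3f, name: "weight");
            var b = tf.Variable(-0.3f, name: "bias");
            var init = tf.global_variables_initializer();

            // Assumes a Python-style Saver API is available in TensorFlow.NET.
            var saver = tf.train.Saver();

            using (var sess = tf.Session())
            {
                sess.run(init);
                // ... run training steps here ...
                var save_path = saver.save(sess, "linear_model.ckpt");
                Console.WriteLine($"Model saved in {save_path}");
            }

            using (var sess = tf.Session())
            {
                // restore() loads the saved values, so the variables do not
                // need to be re-initialized before use.
                saver.restore(sess, "linear_model.ckpt");
                Console.WriteLine($"W={sess.run(W)} b={sess.run(b)}");
            }
        }
    }
}
```

If the snippet holds up against the actual `Saver` implementation, it could later be folded into `docs/source/Train.md` alongside the existing one-line description.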