diff --git a/src/TensorFlowNET.Keras/Activations/Activations.Softmax.cs b/src/TensorFlowNET.Keras/Activations/Activations.Softmax.cs
new file mode 100644
index 00000000..02d86ace
--- /dev/null
+++ b/src/TensorFlowNET.Keras/Activations/Activations.Softmax.cs
@@ -0,0 +1,11 @@
+using System;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras
+{
+    public partial class Activations
+    {
+        public Activation Softmax = (features, name)
+            => tf.Context.ExecuteOp("Softmax", name, new ExecuteOpArgs(features));
+    }
+}
diff --git a/src/TensorFlowNET.Keras/Layers/LayersApi.cs b/src/TensorFlowNET.Keras/Layers/LayersApi.cs
index ed2f91d9..6ffde8ef 100644
--- a/src/TensorFlowNET.Keras/Layers/LayersApi.cs
+++ b/src/TensorFlowNET.Keras/Layers/LayersApi.cs
@@ -1,4 +1,5 @@
-using Tensorflow.NumPy;
+using System;
+using Tensorflow.NumPy;
 using System.Collections.Generic;
 using Tensorflow.Keras.ArgsDefinition;
 using Tensorflow.Keras.Engine;
@@ -834,7 +835,8 @@ namespace Tensorflow.Keras.Layers
                 "relu" => keras.activations.Relu,
                 "sigmoid" => keras.activations.Sigmoid,
                 "tanh" => keras.activations.Tanh,
-                _ => keras.activations.Linear
+                "softmax" => keras.activations.Softmax,
+                _ => throw new NotSupportedException($"Activation {name} not found")
             };
 
         ///