@@ -150,7 +150,7 @@ namespace Tensorflow | |||||
var variables = graph.get_collection<IVariableV1>(tf.GraphKeys.GLOBAL_VARIABLES, | var variables = graph.get_collection<IVariableV1>(tf.GraphKeys.GLOBAL_VARIABLES, | ||||
scope: scope_to_prepend_to_names); | scope: scope_to_prepend_to_names); | ||||
var var_list = new Dictionary<string, IVariableV1>(); | var var_list = new Dictionary<string, IVariableV1>(); | ||||
variables.ForEach(v => var_list[ops.strip_name_scope(v.Name, scope_to_prepend_to_names)] = v); | |||||
// variables.ForEach(v => var_list[ops.strip_name_scope(v.Name, scope_to_prepend_to_names)] = v); | |||||
return (var_list, imported_return_elements); | return (var_list, imported_return_elements); | ||||
} | } | ||||
@@ -1,6 +1,7 @@ | |||||
using NumSharp; | using NumSharp; | ||||
using System; | using System; | ||||
using Tensorflow.Keras.ArgsDefinition; | using Tensorflow.Keras.ArgsDefinition; | ||||
using Tensorflow.Keras.Losses; | |||||
using Tensorflow.Keras.Optimizers; | using Tensorflow.Keras.Optimizers; | ||||
namespace Tensorflow.Keras.Engine | namespace Tensorflow.Keras.Engine | ||||
@@ -42,6 +43,11 @@ namespace Tensorflow.Keras.Engine | |||||
// Prepare list of loss functions, same size of model outputs. | // Prepare list of loss functions, same size of model outputs. | ||||
} | } | ||||
/// <summary>
/// Configures the model for training with the given optimizer and loss.
/// </summary>
/// <param name="optimizerName">Name of the optimizer to use.</param>
/// <param name="lossName">Loss function applied during training.</param>
/// <exception cref="NotImplementedException">Always; training configuration is not implemented yet.</exception>
public void compile(string optimizerName, ILossFunc lossName)
{
    // TODO: wire the optimizer and loss into the training loop.
    // Parameterless ctor keeps the framework's default diagnostic message
    // instead of replacing it with an empty string.
    throw new NotImplementedException();
}
/// <summary> | /// <summary> | ||||
/// Generates output predictions for the input samples. | /// Generates output predictions for the input samples. | ||||
/// </summary> | /// </summary> | ||||
@@ -7,6 +7,7 @@ using Tensorflow.Keras.ArgsDefinition; | |||||
using Tensorflow.Keras.Datasets; | using Tensorflow.Keras.Datasets; | ||||
using Tensorflow.Keras.Engine; | using Tensorflow.Keras.Engine; | ||||
using Tensorflow.Keras.Layers; | using Tensorflow.Keras.Layers; | ||||
using Tensorflow.Keras.Losses; | |||||
using static Tensorflow.Binding; | using static Tensorflow.Binding; | ||||
namespace Tensorflow | namespace Tensorflow | ||||
@@ -16,6 +17,7 @@ namespace Tensorflow | |||||
public KerasDataset datasets { get; } = new KerasDataset(); | public KerasDataset datasets { get; } = new KerasDataset(); | ||||
public Initializers initializers { get; } = new Initializers(); | public Initializers initializers { get; } = new Initializers(); | ||||
public LayersApi layers { get; } = new LayersApi(); | public LayersApi layers { get; } = new LayersApi(); | ||||
public LossesApi losses { get; } = new LossesApi(); | |||||
public Activations activations { get; } = new Activations(); | public Activations activations { get; } = new Activations(); | ||||
public Preprocessing preprocessing { get; } = new Preprocessing(); | public Preprocessing preprocessing { get; } = new Preprocessing(); | ||||
public BackendImpl backend { get; } = new BackendImpl(); | public BackendImpl backend { get; } = new BackendImpl(); | ||||
@@ -69,52 +71,5 @@ namespace Tensorflow | |||||
return layer.InboundNodes[0].Outputs; | return layer.InboundNodes[0].Outputs; | ||||
} | } | ||||
/// <summary>
/// Factory methods for constructing Keras layers.
/// </summary>
public class LayersApi
{
    /// <summary>
    /// Creates a layer that multiplies inputs by <paramref name="scale"/> and adds <paramref name="offset"/>.
    /// </summary>
    public Rescaling Rescaling(float scale,
        float offset = 0,
        TensorShape input_shape = null)
    {
        var args = new RescalingArgs
        {
            Scale = scale,
            Offset = offset,
            InputShape = input_shape
        };
        return new Rescaling(args);
    }

    /// <summary>
    /// Creates a densely-connected layer; a null activation defaults to linear.
    /// </summary>
    public Dense Dense(int units,
        Activation activation = null,
        TensorShape input_shape = null)
    {
        var args = new DenseArgs
        {
            Units = units,
            Activation = activation ?? tf.keras.activations.Linear,
            InputShape = input_shape
        };
        return new Dense(args);
    }

    /// <summary>
    /// Turns positive integers (indexes) into dense vectors of fixed size.
    /// </summary>
    /// <param name="input_dim">Size of the vocabulary.</param>
    /// <param name="output_dim">Dimension of the dense embedding.</param>
    /// <param name="embeddings_initializer">Initializer for the embedding matrix.</param>
    /// <param name="mask_zero">Whether index 0 is a padding value to be masked.</param>
    /// <returns>The configured embedding layer.</returns>
    public Embedding Embedding(int input_dim,
        int output_dim,
        IInitializer embeddings_initializer = null,
        bool mask_zero = false,
        TensorShape input_shape = null,
        int input_length = -1)
    {
        var args = new EmbeddingArgs
        {
            InputDim = input_dim,
            OutputDim = output_dim,
            MaskZero = mask_zero,
            // Fall back to a shape derived from input_length when no explicit shape is given.
            InputShape = input_shape ?? input_length,
            InputLength = input_length,
            EmbeddingsInitializer = embeddings_initializer
        };
        return new Embedding(args);
    }
}
} | } | ||||
} | } |
@@ -0,0 +1,95 @@ | |||||
using System; | |||||
using System.Collections.Generic; | |||||
using System.Text; | |||||
using Tensorflow.Keras.ArgsDefinition; | |||||
using Tensorflow.Keras.Engine; | |||||
using static Tensorflow.Binding; | |||||
namespace Tensorflow.Keras.Layers | |||||
{ | |||||
/// <summary>
/// Factory methods for constructing Keras layers (exposed as tf.keras.layers).
/// </summary>
public class LayersApi
{
    /// <summary>
    /// Creates a 2D convolution layer.
    /// </summary>
    /// <param name="filters">Number of output filters.</param>
    /// <param name="kernel_size">Spatial size of the convolution window.</param>
    /// <param name="padding">Either "valid" or "same".</param>
    /// <param name="activation">Name of the activation applied to the output.</param>
    public Conv2D Conv2D(int filters,
        TensorShape kernel_size = null,
        string padding = "valid",
        string activation = "relu")
        => new Conv2D(new Conv2DArgs
        {
            Filters = filters,
            KernelSize = kernel_size,
            Padding = padding,
            Activation = GetActivationByName(activation)
        });

    /// <summary>
    /// Creates a densely-connected layer.
    /// </summary>
    /// <param name="units">Dimensionality of the output space.</param>
    /// <param name="activation">Name of the activation applied to the output.</param>
    /// <param name="input_shape">Optional shape of the expected input (excluding batch).</param>
    public Dense Dense(int units,
        string activation = "linear",
        TensorShape input_shape = null)
        => new Dense(new DenseArgs
        {
            Units = units,
            Activation = GetActivationByName(activation),
            InputShape = input_shape
        });

    /// <summary>
    /// Turns positive integers (indexes) into dense vectors of fixed size.
    /// </summary>
    /// <param name="input_dim">Size of the vocabulary.</param>
    /// <param name="output_dim">Dimension of the dense embedding.</param>
    /// <param name="embeddings_initializer">Initializer for the embedding matrix.</param>
    /// <param name="mask_zero">Whether index 0 is a padding value to be masked.</param>
    /// <returns>The configured embedding layer.</returns>
    public Embedding Embedding(int input_dim,
        int output_dim,
        IInitializer embeddings_initializer = null,
        bool mask_zero = false,
        TensorShape input_shape = null,
        int input_length = -1)
        => new Embedding(new EmbeddingArgs
        {
            InputDim = input_dim,
            OutputDim = output_dim,
            MaskZero = mask_zero,
            // Fall back to a shape derived from input_length when no explicit shape is given.
            InputShape = input_shape ?? input_length,
            InputLength = input_length,
            EmbeddingsInitializer = embeddings_initializer
        });

    /// <summary>
    /// Flattens the input without affecting the batch dimension.
    /// </summary>
    public Flatten Flatten(string data_format = null)
        => new Flatten(new FlattenArgs
        {
            DataFormat = data_format
        });

    /// <summary>
    /// Creates a max-pooling layer; pool size defaults to (2, 2).
    /// </summary>
    public MaxPooling2D MaxPooling2D(TensorShape pool_size = null,
        TensorShape strides = null,
        string padding = "valid")
        => new MaxPooling2D(new MaxPooling2DArgs
        {
            PoolSize = pool_size ?? (2, 2),
            Strides = strides,
            Padding = padding
        });

    /// <summary>
    /// Creates a layer that multiplies inputs by <paramref name="scale"/> and adds <paramref name="offset"/>.
    /// </summary>
    public Rescaling Rescaling(float scale,
        float offset = 0,
        TensorShape input_shape = null)
        => new Rescaling(new RescalingArgs
        {
            Scale = scale,
            Offset = offset,
            InputShape = input_shape
        });

    /// <summary>
    /// Maps a Keras activation name to its delegate. Unknown names raise
    /// instead of silently falling back to linear, which would mask typos.
    /// </summary>
    /// <exception cref="NotSupportedException">The activation name is not recognized.</exception>
    Activation GetActivationByName(string name)
        => name switch
        {
            "linear" => tf.keras.activations.Linear,
            "relu" => tf.keras.activations.Relu,
            "sigmoid" => tf.keras.activations.Sigmoid,
            "tanh" => tf.keras.activations.Tanh,
            _ => throw new NotSupportedException($"Activation '{name}' is not supported.")
        };
}
} |
@@ -30,7 +30,7 @@ namespace Tensorflow.Keras.Layers | |||||
{ | { | ||||
this.args = args; | this.args = args; | ||||
args.PoolSize = conv_utils.normalize_tuple(args.PoolSize, 2, "pool_size"); | args.PoolSize = conv_utils.normalize_tuple(args.PoolSize, 2, "pool_size"); | ||||
args.Strides = conv_utils.normalize_tuple(args.Strides, 2, "strides"); | |||||
args.Strides = conv_utils.normalize_tuple(args.Strides ?? args.PoolSize, 2, "strides"); | |||||
args.Padding = conv_utils.normalize_padding(args.Padding); | args.Padding = conv_utils.normalize_padding(args.Padding); | ||||
args.DataFormat = conv_utils.normalize_data_format(args.DataFormat); | args.DataFormat = conv_utils.normalize_data_format(args.DataFormat); | ||||
input_spec = new InputSpec(ndim: 4); | input_spec = new InputSpec(ndim: 4); | ||||
@@ -23,7 +23,8 @@ namespace Tensorflow.Keras.Layers | |||||
/// <summary>
/// Applies the rescaling: casts the inputs to the layer dtype, multiplies by
/// the configured scale and adds the configured offset.
/// </summary>
protected override Tensor call(Tensor inputs, bool is_training = false, Tensor state = null)
{
    scale = math_ops.cast(args.Scale, args.DType);
    offset = math_ops.cast(args.Offset, args.DType);
    return math_ops.cast(inputs, args.DType) * scale + offset;
}
} | } | ||||
} | } |
@@ -0,0 +1,10 @@ | |||||
using System; | |||||
using System.Collections.Generic; | |||||
using System.Text; | |||||
namespace Tensorflow.Keras.Losses | |||||
{ | |||||
/// <summary>
/// Marker interface implemented by Keras loss functions so they can be
/// passed to Model.compile; members will be added as the losses API grows.
/// </summary>
public interface ILossFunc
{
}
} |
@@ -0,0 +1,29 @@ | |||||
using System; | |||||
using System.Collections.Generic; | |||||
using System.Text; | |||||
namespace Tensorflow.Keras.Losses | |||||
{ | |||||
/// <summary>
/// Loss base class.
/// </summary>
public abstract class Loss
{
    // How the per-sample losses are reduced to a scalar (see ReductionV2).
    protected string reduction;
    // Optional op/scope name for the loss.
    protected string name;
    bool _allow_sum_over_batch_size;
    string _name_scope;

    /// <summary>
    /// Initializes the loss with a reduction strategy and an optional name.
    /// Protected because the type is abstract and can only be constructed
    /// through a derived class (CA1012).
    /// </summary>
    protected Loss(string reduction = ReductionV2.AUTO, string name = null)
    {
        this.reduction = reduction;
        this.name = name;
        _allow_sum_over_batch_size = false;
    }

    // Caches the scope name used when building the loss op.
    void _set_name_scope()
    {
        _name_scope = name;
    }
}
} |
@@ -0,0 +1,20 @@ | |||||
using System; | |||||
using System.Collections.Generic; | |||||
using System.Text; | |||||
namespace Tensorflow.Keras.Losses | |||||
{ | |||||
/// <summary>
/// Wraps a loss-computation delegate in a <see cref="Loss"/> object.
/// </summary>
public class LossFunctionWrapper : Loss
{
    // The wrapped loss computation; assigned once in the constructor.
    // NOTE(review): Action takes no arguments here — presumably a placeholder
    // until the (y_true, y_pred) signature is wired in; confirm before extending.
    readonly Action fn;

    /// <summary>
    /// Creates the wrapper around <paramref name="fn"/>.
    /// </summary>
    /// <param name="fn">Delegate performing the loss computation.</param>
    /// <param name="reduction">Reduction strategy (see <see cref="ReductionV2"/>).</param>
    /// <param name="name">Optional name for the loss.</param>
    public LossFunctionWrapper(Action fn,
        string reduction = ReductionV2.AUTO,
        string name = null)
        : base(reduction: reduction,
            name: name)
    {
        this.fn = fn;
    }
}
} |
@@ -0,0 +1,12 @@ | |||||
using System; | |||||
using System.Collections.Generic; | |||||
using System.Text; | |||||
namespace Tensorflow.Keras.Losses | |||||
{ | |||||
/// <summary>
/// Entry point for the built-in Keras loss functions.
/// </summary>
public class LossesApi
{
    /// <summary>
    /// Creates a sparse categorical cross-entropy loss.
    /// </summary>
    /// <param name="from_logits">Whether predictions are raw logits rather than probabilities.</param>
    public ILossFunc SparseCategoricalCrossentropy(bool from_logits = false)
    {
        return new SparseCategoricalCrossentropy(from_logits: from_logits);
    }
}
} |
@@ -0,0 +1,11 @@ | |||||
using System; | |||||
using System.Collections.Generic; | |||||
using System.Text; | |||||
namespace Tensorflow.Keras.Losses | |||||
{ | |||||
/// <summary>
/// Names of the supported loss-reduction strategies.
/// Static holder type: it only exposes constants and is never instantiated (CA1052).
/// </summary>
public static class ReductionV2
{
    // Let the framework pick the reduction automatically.
    public const string AUTO = "auto";
}
} |
@@ -0,0 +1,24 @@ | |||||
using System; | |||||
using System.Collections.Generic; | |||||
using System.Text; | |||||
namespace Tensorflow.Keras.Losses | |||||
{ | |||||
/// <summary>
/// Sparse categorical cross-entropy loss between integer labels and predictions.
/// </summary>
public class SparseCategoricalCrossentropy : LossFunctionWrapper, ILossFunc
{
    // Whether predictions are raw logits rather than probabilities. The
    // original constructor accepted this flag but silently discarded it;
    // it is recorded here so the computation can honor it once implemented.
    readonly bool from_logits;

    public SparseCategoricalCrossentropy(bool from_logits = false,
        string reduction = ReductionV2.AUTO,
        string name = "sparse_categorical_crossentropy") :
        base(sparse_categorical_crossentropy,
            reduction: reduction,
            name: name)
    {
        this.from_logits = from_logits;
    }

    // TODO: implement the actual loss computation; currently a stub delegate.
    static void sparse_categorical_crossentropy()
    {
    }
}
} |
@@ -25,7 +25,7 @@ namespace Tensorflow.Keras | |||||
var image = path_to_image(image_paths[i], image_size, num_channels, interpolation); | var image = path_to_image(image_paths[i], image_size, num_channels, interpolation); | ||||
data[i] = image.numpy(); | data[i] = image.numpy(); | ||||
if (i % 100 == 0) | if (i % 100 == 0) | ||||
Console.WriteLine($"Filled {i}/{image_paths.Length} data into memory."); | |||||
Console.WriteLine($"Filled {i}/{image_paths.Length} data into ndarray."); | |||||
} | } | ||||
var img_ds = tf.data.Dataset.from_tensor_slices(data); | var img_ds = tf.data.Dataset.from_tensor_slices(data); | ||||