@@ -276,8 +276,8 @@ namespace Tensorflow
         {
             if (axis == null)
             {
-                var a = t1.Data<T>();
-                var b = t2.Data<T>();
+                var a = t1.ToArray<T>();
+                var b = t2.ToArray<T>();
                 for (int i = 0; i < a.Length; i++)
                     yield return (a[i], b[i]);
             }
@@ -162,7 +162,7 @@ namespace Tensorflow
             var num_labels = labels_dense.dims[0];
             var index_offset = np.arange(num_labels) * num_classes;
             var labels_one_hot = np.zeros((num_labels, num_classes));
-            var labels = labels_dense.Data<byte>();
+            var labels = labels_dense.ToArray<byte>();
             for (int row = 0; row < num_labels; row++)
             {
                 var col = labels[row];
@@ -176,7 +176,7 @@ namespace Tensorflow
         {
             var buffer = new byte[sizeof(uint)];
             var count = bytestream.Read(buffer, 0, 4);
-            return np.frombuffer(buffer, ">u4").Data<int>()[0];
+            return np.frombuffer(buffer, ">u4").ToArray<int>()[0];
         }
     }
 }
@@ -506,7 +506,7 @@ namespace Tensorflow.Gradients
             if (!(axes is null))
             {
                 var rank = input_0_shape.Length;
-                if (Enumerable.SequenceEqual(Enumerable.Range(0, rank), axes.Data<int>()))
+                if (Enumerable.SequenceEqual(Enumerable.Range(0, rank), axes.ToArray<int>()))
                 {
                     if (tf.Context.executing_eagerly())
                     {
@@ -62,13 +62,9 @@ namespace Tensorflow.NumPy
         public byte[] ToByteArray() => _tensor.BufferToArray();
         public static string[] AsStringArray(NDArray arr) => throw new NotImplementedException("");
-        public T[] Data<T>() where T : unmanaged
-            => _tensor.ToArray<T>();
         public T[] ToArray<T>() where T : unmanaged
             => _tensor.ToArray<T>();
-        public static NDArray operator /(NDArray x, NDArray y) => throw new NotImplementedException("");
         public override string ToString()
         {
             return tensor_util.to_numpy_string(_tensor);
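For reference only (not part of the diff): a minimal sketch of how a call site migrates once NDArray.Data<T>() is removed; the nd variable and values array below are illustrative.

    NDArray nd = np.arange(3);
    // before this change: int[] values = nd.Data<int>();
    int[] values = nd.ToArray<int>();   // materializes the backing tensor data as a managed array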
@@ -257,7 +257,7 @@ scalar with value '-1' to describe an unknown shape.", value_));
             var dest_dtype_shape_array = np.array(x_).astype(cast_dtype.as_system_dtype());
             long[] y_ = { };
-            foreach (int y in dest_dtype_shape_array.Data<int>())
+            foreach (int y in dest_dtype_shape_array.ToArray<int>())
                 if (y >= 0)
                     y_[y_.Length] = y;
                 else
@@ -55,7 +55,7 @@ namespace Tensorflow.Keras
                 value = 0f;
             var type = dtypes.tf_dtype_from_name(dtype);
-            var nd = new NDArray((length.Count(), maxlen.Value), dtype: type);
+            var nd = np.zeros((length.Count(), maxlen.Value), dtype: type);
             for (int i = 0; i < nd.dims[0]; i++)
             {
@@ -249,7 +249,7 @@ namespace TensorFlowNET.Keras.UnitTest
                 Assert.AreNotEqual(padded[1, i], 0);
         }
-        [TestMethod, Ignore("slice assign doesn't work")]
+        [TestMethod]
         public void PadSequencesPrePaddingTrunc()
         {
             var tokenizer = keras.preprocessing.text.Tokenizer(oov_token: OOV);
@@ -261,15 +261,15 @@ namespace TensorFlowNET.Keras.UnitTest
             Assert.AreEqual(4, padded.dims[0]);
             Assert.AreEqual(15, padded.dims[1]);
-            Assert.AreEqual(tokenizer.word_index["worst"], padded[0, 12]);
+            Assert.AreEqual(padded[0, 12], tokenizer.word_index["worst"]);
             for (var i = 0; i < 3; i++)
-                Assert.AreEqual(0, padded[0, i]);
-            Assert.AreEqual(tokenizer.word_index["proud"], padded[1, 3]);
+                Assert.AreEqual(padded[0, i], 0);
+            Assert.AreEqual(padded[1, 3], tokenizer.word_index["proud"]);
             for (var i = 0; i < 15; i++)
-                Assert.AreNotEqual(0, padded[1, i]);
+                Assert.AreNotEqual(padded[1, i], 0);
         }
-        [TestMethod, Ignore("slice assign doesn't work")]
+        [TestMethod]
         public void PadSequencesPrePaddingTrunc_Larger()
         {
             var tokenizer = keras.preprocessing.text.Tokenizer(oov_token: OOV);
@@ -281,13 +281,13 @@ namespace TensorFlowNET.Keras.UnitTest
             Assert.AreEqual(4, padded.dims[0]);
             Assert.AreEqual(45, padded.dims[1]);
-            Assert.AreEqual(tokenizer.word_index["worst"], padded[0, 42]);
+            Assert.AreEqual(padded[0, 42], tokenizer.word_index["worst"]);
             for (var i = 0; i < 33; i++)
-                Assert.AreEqual(0, padded[0, i]);
-            Assert.AreEqual(tokenizer.word_index["proud"], padded[1, 33]);
+                Assert.AreEqual(padded[0, i], 0);
+            Assert.AreEqual(padded[1, 33], tokenizer.word_index["proud"]);
         }
-        [TestMethod, Ignore("slice assign doesn't work")]
+        [TestMethod]
         public void PadSequencesPostPaddingTrunc()
         {
             var tokenizer = keras.preprocessing.text.Tokenizer(oov_token: OOV);
@@ -299,15 +299,15 @@ namespace TensorFlowNET.Keras.UnitTest
             Assert.AreEqual(4, padded.dims[0]);
             Assert.AreEqual(15, padded.dims[1]);
-            Assert.AreEqual(tokenizer.word_index["worst"], padded[0, 9]);
+            Assert.AreEqual(padded[0, 9], tokenizer.word_index["worst"]);
             for (var i = 12; i < 15; i++)
-                Assert.AreEqual(0, padded[0, i]);
-            Assert.AreEqual(tokenizer.word_index["proud"], padded[1, 10]);
+                Assert.AreEqual(padded[0, i], 0);
+            Assert.AreEqual(padded[1, 10], tokenizer.word_index["proud"]);
             for (var i = 0; i < 15; i++)
-                Assert.AreNotEqual(0, padded[1, i]);
+                Assert.AreNotEqual(padded[1, i], 0);
         }
-        [TestMethod, Ignore("slice assign doesn't work")]
+        [TestMethod]
         public void PadSequencesPostPaddingTrunc_Larger()
         {
             var tokenizer = keras.preprocessing.text.Tokenizer(oov_token: OOV);
@@ -319,10 +319,10 @@ namespace TensorFlowNET.Keras.UnitTest
             Assert.AreEqual(4, padded.dims[0]);
             Assert.AreEqual(45, padded.dims[1]);
-            Assert.AreEqual(tokenizer.word_index["worst"], padded[0, 9]);
+            Assert.AreEqual(padded[0, 9], tokenizer.word_index["worst"]);
             for (var i = 32; i < 45; i++)
-                Assert.AreEqual(0, padded[0, i]);
-            Assert.AreEqual(tokenizer.word_index["proud"], padded[1, 10]);
+                Assert.AreEqual(padded[0, i], 0);
+            Assert.AreEqual(padded[1, 10], tokenizer.word_index["proud"]);
         }
         [TestMethod]
@@ -104,7 +104,7 @@ namespace Tensorflow.Native.UnitTest.Tensors
             EXPECT_EQ(tensor.shape[0], nd.dims[0]);
             EXPECT_EQ(tensor.shape[1], nd.dims[1]);
             EXPECT_EQ(tensor.bytesize, nd.size * sizeof(float));
-            Assert.IsTrue(Enumerable.SequenceEqual(nd.Data<float>(), new float[] { 1, 2, 3, 4, 5, 6 }));
+            Assert.IsTrue(Enumerable.SequenceEqual(nd.ToArray<float>(), new float[] { 1, 2, 3, 4, 5, 6 }));
         }
         /// <summary>
@@ -26,43 +26,43 @@ namespace TensorFlowNET.UnitTest.NumPy
         public void arange()
         {
             var x = np.arange(3);
-            AssetSequenceEqual(new[] { 0, 1, 2 }, x.Data<int>());
+            AssetSequenceEqual(new[] { 0, 1, 2 }, x.ToArray<int>());
             x = np.arange(3f);
-            Assert.IsTrue(Equal(new float[] { 0, 1, 2 }, x.Data<float>()));
+            Assert.IsTrue(Equal(new float[] { 0, 1, 2 }, x.ToArray<float>()));
             var y = np.arange(3, 7);
-            AssetSequenceEqual(new[] { 3, 4, 5, 6 }, y.Data<int>());
+            AssetSequenceEqual(new[] { 3, 4, 5, 6 }, y.ToArray<int>());
             y = np.arange(3, 7, 2);
-            AssetSequenceEqual(new[] { 3, 5 }, y.Data<int>());
+            AssetSequenceEqual(new[] { 3, 5 }, y.ToArray<int>());
         }
         [TestMethod]
         public void array()
         {
             var x = np.array(1, 2, 3);
-            AssetSequenceEqual(new[] { 1, 2, 3 }, x.Data<int>());
+            AssetSequenceEqual(new[] { 1, 2, 3 }, x.ToArray<int>());
             x = np.array(new[,] { { 1, 2 }, { 3, 4 }, { 5, 6 } });
-            AssetSequenceEqual(new[] { 1, 2, 3, 4, 5, 6 }, x.Data<int>());
+            AssetSequenceEqual(new[] { 1, 2, 3, 4, 5, 6 }, x.ToArray<int>());
         }
         [TestMethod]
         public void eye()
        {
             var x = np.eye(3, k: 1);
-            Assert.IsTrue(Equal(new double[] { 0, 1, 0, 0, 0, 1, 0, 0, 0 }, x.Data<double>()));
+            Assert.IsTrue(Equal(new double[] { 0, 1, 0, 0, 0, 1, 0, 0, 0 }, x.ToArray<double>()));
         }
         [TestMethod]
         public void linspace()
         {
             var x = np.linspace(2.0, 3.0, num: 5);
-            Assert.IsTrue(Equal(new double[] { 2, 2.25, 2.5, 2.75, 3 }, x.Data<double>()));
+            Assert.IsTrue(Equal(new double[] { 2, 2.25, 2.5, 2.75, 3 }, x.ToArray<double>()));
             x = np.linspace(2.0, 3.0, num: 5, endpoint: false);
-            Assert.IsTrue(Equal(new double[] { 2, 2.2, 2.4, 2.6, 2.8 }, x.Data<double>()));
+            Assert.IsTrue(Equal(new double[] { 2, 2.2, 2.4, 2.6, 2.8 }, x.ToArray<double>()));
         }
         [TestMethod]
@@ -71,13 +71,13 @@ namespace TensorFlowNET.UnitTest.NumPy
             var x = np.linspace(0, 1, num: 3);
             var y = np.linspace(0, 1, num: 2);
             var (xv, yv) = np.meshgrid(x, y);
-            Assert.IsTrue(Equal(new double[] { 0, 0.5, 1, 0, 0.5, 1 }, xv.Data<double>()));
-            Assert.IsTrue(Equal(new double[] { 0, 0, 0, 1, 1, 1 }, yv.Data<double>()));
+            Assert.IsTrue(Equal(new double[] { 0, 0.5, 1, 0, 0.5, 1 }, xv.ToArray<double>()));
+            Assert.IsTrue(Equal(new double[] { 0, 0, 0, 1, 1, 1 }, yv.ToArray<double>()));
             (xv, yv) = np.meshgrid(x, y, sparse: true);
-            Assert.IsTrue(Equal(new double[] { 0, 0.5, 1 }, xv.Data<double>()));
+            Assert.IsTrue(Equal(new double[] { 0, 0.5, 1 }, xv.ToArray<double>()));
             AssetSequenceEqual(new long[] { 1, 3 }, xv.shape.dims);
-            Assert.IsTrue(Equal(new double[] { 0, 1 }, yv.Data<double>()));
+            Assert.IsTrue(Equal(new double[] { 0, 1 }, yv.ToArray<double>()));
             AssetSequenceEqual(new long[] { 2, 1 }, yv.shape.dims);
         }
     }
@@ -25,7 +25,7 @@ namespace TensorFlowNET.UnitTest.NumPy
             p = np.prod(new[,] { { 1.0, 2.0 }, { 3.0, 4.0 } }, axis: 1);
             Assert.AreEqual(p.shape, 2);
-            Assert.IsTrue(Equal(p.Data<double>(), new[] { 2.0, 12.0 }));
+            Assert.IsTrue(Equal(p.ToArray<double>(), new[] { 2.0, 12.0 }));
         }
     }
 }