From 5e4f53077f94ddf8513dd925f18eeb05b81a9482 Mon Sep 17 00:00:00 2001
From: dogvane
Date: Sun, 8 Oct 2023 21:52:55 +0800
Subject: [PATCH] Fix the left-right and up-down image flip operations, and add
 corresponding test cases.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 data/img001.bmp                               | Bin 0 -> 178662 bytes
 src/TensorFlowNET.Core/APIs/tf.image.cs       |   7 +
 src/TensorFlowNET.Core/APIs/tf.io.cs          |   7 +
 .../Keras/Layers/ILayersApi.cs                |   6 +
 .../Operations/image_ops_impl.cs              |  43 ++-
 src/TensorFlowNET.Keras/Layers/LayersApi.cs   |  23 +-
 .../TensorFlowNET.Graph.UnitTest/ImageTest.cs |  90 +++++
 .../ManagedAPI/ArrayOpsTest.cs                | 317 ++++++++++++++++++
 .../TensorFlowNET.UnitTest/NumPy/ShapeTest.cs |  44 +++
 9 files changed, 525 insertions(+), 12 deletions(-)
 create mode 100644 data/img001.bmp
 create mode 100644 test/TensorFlowNET.UnitTest/NumPy/ShapeTest.cs

diff --git a/data/img001.bmp b/data/img001.bmp
new file mode 100644
index 0000000000000000000000000000000000000000..d149d76f1ac11b4f5f6700560f9bba04868f8ab4
GIT binary patch
literal 178662
[binary image payload omitted]

literal 0
HcmV?d00001

diff --git a/src/TensorFlowNET.Core/APIs/tf.image.cs b/src/TensorFlowNET.Core/APIs/tf.image.cs
index ac9cbc60..41ef5296 100644
--- a/src/TensorFlowNET.Core/APIs/tf.image.cs
+++ b/src/TensorFlowNET.Core/APIs/tf.image.cs
@@ -339,6 +339,13 @@ namespace Tensorflow
            => image_ops_impl.decode_image(contents, channels: channels, dtype: dtype,
                name: name, expand_animations: expand_animations);
 
+        public Tensor encode_png(Tensor contents, string name = null)
+            => image_ops_impl.encode_png(contents, name: name);
+
+        public Tensor encode_jpeg(Tensor contents, string name = null)
+            => image_ops_impl.encode_jpeg(contents, name: name);
+
+
         /// <summary>
         /// Convenience function to check if the 'contents' encodes a JPEG image.
         /// </summary>
diff --git a/src/TensorFlowNET.Core/APIs/tf.io.cs b/src/TensorFlowNET.Core/APIs/tf.io.cs
index be1e86e6..ea1e44b2 100644
--- a/src/TensorFlowNET.Core/APIs/tf.io.cs
+++ b/src/TensorFlowNET.Core/APIs/tf.io.cs
@@ -16,6 +16,7 @@
 using System.Collections.Generic;
 using Tensorflow.IO;
+using Tensorflow.Operations;
 
 namespace Tensorflow
 {
@@ -46,6 +47,12 @@ namespace Tensorflow
         public Tensor[] restore_v2(Tensor prefix, string[] tensor_names,
             string[] shape_and_slices, TF_DataType[] dtypes, string name = null)
             => ops.restore_v2(prefix, tensor_names, shape_and_slices, dtypes, name: name);
+
+        public Operation write_file(string filename, Tensor contents, string name = null)
+            => write_file(Tensorflow.ops.convert_to_tensor(filename, TF_DataType.TF_STRING), contents, name);
+
+        public Operation write_file(Tensor filename, Tensor contents, string name = null)
+            => gen_ops.write_file(filename, contents, name);
     }
 
     public GFile gfile = new GFile();
diff --git a/src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs b/src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs
index a8141d35..3fd98e7a 100644
--- a/src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs
+++ b/src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs
@@ -55,6 +55,12 @@ namespace Tensorflow.Keras.Layers
             string kernel_initializer = "glorot_uniform",
             string bias_initializer = "zeros");
 
+        public ILayer Conv2D(int filters,
+            Shape kernel_size = null,
+            Shape strides = null,
+            string padding = "valid"
+            );
+
         public ILayer Conv2D(int filters,
             Shape kernel_size = null,
             Shape strides = null,
diff --git a/src/TensorFlowNET.Core/Operations/image_ops_impl.cs b/src/TensorFlowNET.Core/Operations/image_ops_impl.cs
index 318b8b14..f1aff28e 100644
--- a/src/TensorFlowNET.Core/Operations/image_ops_impl.cs
+++ b/src/TensorFlowNET.Core/Operations/image_ops_impl.cs
@@ -102,7 +102,10 @@ namespace Tensorflow
             {
                 throw new ValueError("\'image\' must be fully defined.");
             }
-            var dims = image_shape["-3:"];
+            var dims = new Shape(new[] {
+                image_shape.dims[image_shape.dims.Length - 3],
+                image_shape.dims[image_shape.dims.Length - 2],
+                image_shape.dims[image_shape.dims.Length - 1]});
             foreach (var dim in dims.dims)
             {
                 if (dim == 0)
@@ -112,16 +115,18 @@ namespace Tensorflow
             }
 
             var image_shape_last_three_elements = new Shape(new[] {
-                image_shape.dims[image_shape.dims.Length - 1],
+                image_shape.dims[image_shape.dims.Length - 3],
                 image_shape.dims[image_shape.dims.Length - 2],
-                image_shape.dims[image_shape.dims.Length - 3]});
+                image_shape.dims[image_shape.dims.Length - 1]});
             if (!image_shape_last_three_elements.IsFullyDefined)
             {
                 Tensor image_shape_ = array_ops.shape(image);
-                var image_shape_return = tf.constant(new[] {
-                    image_shape_.dims[image_shape.dims.Length - 1],
-                    image_shape_.dims[image_shape.dims.Length - 2],
-                    image_shape_.dims[image_shape.dims.Length - 3]});
+                var image_shape_return = tf.slice(image_shape_, new[] { Math.Max(image_shape.dims.Length - 3, 0) }, new[] { 3 });
+
+                //var image_shape_return = tf.constant(new[] {
+                //    image_shape_.dims[image_shape_.dims.Length - 3],
+                //    image_shape_.dims[image_shape_.dims.Length - 2],
+                //    image_shape_.dims[image_shape_.dims.Length - 1]});
 
                 return new Operation[] {
                     check_ops.assert_positive(
@@ -209,10 +214,10 @@ namespace Tensorflow
         }
 
         public static Tensor flip_left_right(Tensor image)
-            => _flip(image, 0, "flip_left_right");
+            => _flip(image, 1, "flip_left_right");
 
         public static Tensor flip_up_down(Tensor image)
-            => _flip(image, 1, "flip_up_down");
+            => _flip(image, 0, "flip_up_down");
 
         internal static Tensor _flip(Tensor image,
            int flip_index, string scope_name)
         {
@@ -223,11 +228,11 @@ namespace Tensorflow
             Shape shape = image.shape;
             if (shape.ndim == 3 || shape.ndim == Unknown)
             {
-                return fix_image_flip_shape(image, gen_array_ops.reverse(image, ops.convert_to_tensor(new int[] { flip_index })));
+                return fix_image_flip_shape(image, gen_array_ops.reverse_v2(image, ops.convert_to_tensor(new int[] { flip_index })));
             }
             else if (shape.ndim == 4)
             {
-                return gen_array_ops.reverse_v2(image, ops.convert_to_tensor(new[] { (flip_index + 1) % 2 }));
+                return gen_array_ops.reverse_v2(image, ops.convert_to_tensor(new[] { flip_index + 1 }));
             }
             else
             {
@@ -2047,6 +2052,22 @@ new_height, new_width");
             });
         }
 
+        public static Tensor encode_jpeg(Tensor contents, string name = null)
+        {
+            return tf_with(ops.name_scope(name, "encode_jpeg"), scope =>
+            {
+                return gen_ops.encode_jpeg(contents, name: name);
+            });
+        }
+
+        public static Tensor encode_png(Tensor contents, string name = null)
+        {
+            return tf_with(ops.name_scope(name, "encode_png"), scope =>
+            {
+                return gen_ops.encode_png(contents, name: name);
+            });
+        }
+
         public static Tensor is_jpeg(Tensor contents, string name = null)
         {
             return tf_with(ops.name_scope(name, "is_jpeg"), scope =>
diff --git a/src/TensorFlowNET.Keras/Layers/LayersApi.cs b/src/TensorFlowNET.Keras/Layers/LayersApi.cs
index 95828fbf..bcc19dc2 100644
--- a/src/TensorFlowNET.Keras/Layers/LayersApi.cs
+++ b/src/TensorFlowNET.Keras/Layers/LayersApi.cs
@@ -112,7 +112,28 @@ namespace Tensorflow.Keras.Layers
                 KernelInitializer = GetInitializerByName(kernel_initializer),
                 BiasInitializer = GetInitializerByName(bias_initializer)
             });
-
+        public ILayer Conv2D(int filters,
+            Shape kernel_size = null,
+            Shape strides = null,
+            string padding = "valid")
+            => new Conv2D(new Conv2DArgs
+            {
+                Rank = 2,
+                Filters = filters,
+                KernelSize = (kernel_size == null) ? (5, 5) : kernel_size,
+                Strides = strides == null ? (1, 1) : strides,
+                Padding = padding,
+                DataFormat = null,
+                DilationRate = (1, 1),
+                Groups = 1,
+                UseBias = false,
+                KernelRegularizer = null,
+                KernelInitializer = tf.glorot_uniform_initializer,
+                BiasInitializer = tf.zeros_initializer,
+                BiasRegularizer = null,
+                ActivityRegularizer = null,
+                Activation = keras.activations.Linear,
+            });
         /// <summary>
         /// 2D convolution layer (e.g. spatial convolution over images).
         /// This layer creates a convolution kernel that is convolved with the layer input to produce a tensor of outputs.
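Editor's note (not part of the patch): a minimal usage sketch of the new parameter-light Conv2D overload added above, using TensorFlow.NET's functional Keras API. The input shape and filter count are illustrative only, and it is assumed that a four-argument call resolves to the new four-parameter overload.

using Tensorflow;
using static Tensorflow.Binding;
using static Tensorflow.KerasApi;

// Sketch: 16 filters, 3x3 kernel, stride 1, "same" padding.
var inputs = keras.Input(shape: (32, 32, 3));
var conv = keras.layers.Conv2D(16, (3, 3), (1, 1), "same");
var outputs = conv.Apply(inputs);
var model = keras.Model(inputs, outputs);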
diff --git a/test/TensorFlowNET.Graph.UnitTest/ImageTest.cs b/test/TensorFlowNET.Graph.UnitTest/ImageTest.cs
index d671b609..127b65bf 100644
--- a/test/TensorFlowNET.Graph.UnitTest/ImageTest.cs
+++ b/test/TensorFlowNET.Graph.UnitTest/ImageTest.cs
@@ -4,6 +4,7 @@ using System.Linq;
 using Tensorflow;
 using static Tensorflow.Binding;
 using System;
+using System.IO;
 
 namespace TensorFlowNET.UnitTest
 {
@@ -164,5 +165,94 @@ namespace TensorFlowNET.UnitTest
             Assert.AreEqual(result.size, 16ul);
             Assert.AreEqual(result[0, 0, 0, 0], 12f);
         }
+
+        [TestMethod]
+        public void ImageSaveTest()
+        {
+            var imgPath = TestHelper.GetFullPathFromDataDir("img001.bmp");
+            var jpegImgPath = TestHelper.GetFullPathFromDataDir("img001.jpeg");
+            var pngImgPath = TestHelper.GetFullPathFromDataDir("img001.png");
+
+            File.Delete(jpegImgPath);
+            File.Delete(pngImgPath);
+
+            var contents = tf.io.read_file(imgPath);
+            var bmp = tf.image.decode_image(contents);
+            Assert.AreEqual(bmp.name, "decode_image/DecodeImage:0");
+
+            var jpeg = tf.image.encode_jpeg(bmp);
+            var op1 = tf.io.write_file(jpegImgPath, jpeg);
+
+            var png = tf.image.encode_png(bmp);
+            var op2 = tf.io.write_file(pngImgPath, png);
+
+            this.session().run(op1);
+            this.session().run(op2);
+
+            Assert.IsTrue(File.Exists(jpegImgPath), "file not found: " + jpegImgPath);
+            Assert.IsTrue(File.Exists(pngImgPath), "file not found: " + pngImgPath);
+
+            // To verify the written images manually, comment out the two File.Delete lines below.
+            File.Delete(jpegImgPath);
+            File.Delete(pngImgPath);
+        }
+
+        [TestMethod]
+        public void ImageFlipTest()
+        {
+            var imgPath = TestHelper.GetFullPathFromDataDir("img001.bmp");
+
+            var contents = tf.io.read_file(imgPath);
+            var bmp = tf.image.decode_image(contents);
+
+            // Left-right flip
+            var lrImgPath = TestHelper.GetFullPathFromDataDir("img001_lr.png");
+            File.Delete(lrImgPath);
+
+            var lr = tf.image.flip_left_right(bmp);
+            var png = tf.image.encode_png(lr);
+            var op = tf.io.write_file(lrImgPath, png);
+            this.session().run(op);
+
+            Assert.IsTrue(File.Exists(lrImgPath), "file not found: " + lrImgPath);
+
+            // Up-down flip
+            var updownImgPath = TestHelper.GetFullPathFromDataDir("img001_updown.png");
+            File.Delete(updownImgPath);
+
+            var updown = tf.image.flip_up_down(bmp);
+            var pngupdown = tf.image.encode_png(updown);
+            var op2 = tf.io.write_file(updownImgPath, pngupdown);
+            this.session().run(op2);
+            Assert.IsTrue(File.Exists(updownImgPath));
+
+
+            // For now the flip results are checked by eye; remove the two File.Delete lines
+            // below when inspecting the images.
+            File.Delete(lrImgPath);
+            File.Delete(updownImgPath);
+
+            // Flip a batch of images.
+            // The shape is taken directly from the decoded bmp, so the batch is simply
+            // built by stacking the original and the flipped image.
+            var mImg = tf.stack(new[] { bmp, lr }, axis: 0);
+            print(mImg.shape);
+
+            var up2 = tf.image.flip_up_down(mImg);
+
+            var updownImgPath_m1 = TestHelper.GetFullPathFromDataDir("img001_m_ud.png"); // up-down flip only
+            File.Delete(updownImgPath_m1);
+
+            var img001_updown_m2 = TestHelper.GetFullPathFromDataDir("img001_m_lr_ud.png"); // left-right, then up-down
+            File.Delete(img001_updown_m2);
+
+            var png2 = tf.image.encode_png(up2[0]);
+            tf.io.write_file(updownImgPath_m1, png2);
+
+            png2 = tf.image.encode_png(up2[1]);
+            tf.io.write_file(img001_updown_m2, png2);
+
+            // To verify the written images manually, comment out the two File.Delete lines below.
+            File.Delete(updownImgPath_m1);
+            File.Delete(img001_updown_m2);
+        }
     }
 }
diff --git a/test/TensorFlowNET.UnitTest/ManagedAPI/ArrayOpsTest.cs b/test/TensorFlowNET.UnitTest/ManagedAPI/ArrayOpsTest.cs
index 675689bb..e25c9779 100644
--- a/test/TensorFlowNET.UnitTest/ManagedAPI/ArrayOpsTest.cs
+++ b/test/TensorFlowNET.UnitTest/ManagedAPI/ArrayOpsTest.cs
@@ -3,6 +3,7 @@ using Tensorflow.NumPy;
 using Tensorflow;
 using static Tensorflow.Binding;
 using System.Linq;
+using Tensorflow.Operations;
 
 namespace TensorFlowNET.UnitTest.ManagedAPI
 {
@@ -105,5 +106,321 @@ namespace TensorFlowNET.UnitTest.ManagedAPI
             Assert.IsTrue(Equal(a[0].ToArray().Reverse().ToArray(), b[0].ToArray()));
             Assert.IsTrue(Equal(a[1].ToArray().Reverse().ToArray(), b[1].ToArray()));
         }
+
+        [TestMethod]
+        public void ReverseImgArray3D()
+        {
+            // Create the sourceImg array (the distinct pixel sits in the top-left corner).
+            var sourceImgArray = new float[,,] {
+                { { 237, 28, 36 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } }
+            };
+            var sourceImg = ops.convert_to_tensor(sourceImgArray);
+
+            // Create the lrImg array (expected result of a left-right flip).
+            var lrImgArray = new float[,,] {
+                { { 255, 255, 255 }, { 255, 255, 255 }, { 237, 28, 36 } },
+                { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } }
+            };
+            var lrImg = ops.convert_to_tensor(lrImgArray);
+
+            var lr = tf.image.flip_left_right(sourceImg);
+            Assert.IsTrue(Equal(lrImg.numpy().ToArray(), lr.numpy().ToArray()), "tf.image.flip_left_right fail.");
+
+            var lr2 = tf.reverse(sourceImg, 1);
+            Assert.IsTrue(Equal(lrImg.numpy().ToArray(), lr2.numpy().ToArray()), "tf.reverse (axis=1) fail.");
+
+            var lr3 = gen_array_ops.reverse_v2(sourceImg, ops.convert_to_tensor(new[] { 1 }));
+            Assert.IsTrue(Equal(lrImg.numpy().ToArray(), lr3.numpy().ToArray()), "gen_array_ops.reverse_v2 axis=1 fail.");
+
+            // Create the udImg array (expected result of an up-down flip).
+            var udImgArray = new float[,,] {
+                { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                { { 237, 28, 36 }, { 255, 255, 255 }, { 255, 255, 255 } }
+            };
+            var udImg = ops.convert_to_tensor(udImgArray);
+
+            var ud = tf.image.flip_up_down(sourceImg);
+            Assert.IsTrue(Equal(udImg.numpy().ToArray(), ud.numpy().ToArray()), "tf.image.flip_up_down fail.");
+
+            var ud2 = tf.reverse(sourceImg, new Axis(0));
+            Assert.IsTrue(Equal(udImg.numpy().ToArray(), ud2.numpy().ToArray()), "tf.reverse (axis=0) fail.");
+
+            var ud3 = gen_array_ops.reverse_v2(sourceImg, ops.convert_to_tensor(new[] { 0 }));
+            Assert.IsTrue(Equal(udImg.numpy().ToArray(), ud3.numpy().ToArray()), "gen_array_ops.reverse_v2 axis=0 fail.");
+        }
+
+        [TestMethod]
+        public void ReverseImgArray4D()
+        {
+            // The original image (distinct pixel in the top-left corner) plus a
+            // left-right flipped copy, stacked into a batch.
+            var m = new float[,,,] {
+                {
+                    { { 237, 28, 36 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } }
+                },
+                {
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 237, 28, 36 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } }
+                }
+            };
+            var sourceImg = ops.convert_to_tensor(m);
+
+            // Expected result of a left-right flip of each image in the batch.
+            var lrArray = new float[,,,] {
+                {
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 237, 28, 36 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } }
+                },
+                {
+                    { { 237, 28, 36 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } }
+                }
+            };
+            var lrImg = ops.convert_to_tensor(lrArray);
+
+            // Create the expected up-down array.
+            var udArray = new float[,,,] {
+                {
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 237, 28, 36 }, { 255, 255, 255 }, { 255, 255, 255 } }
+                },
+                {
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 237, 28, 36 } }
+                }
+            };
+            var udImg = ops.convert_to_tensor(udArray);
+
+            var ud3 = gen_array_ops.reverse_v2(sourceImg, ops.convert_to_tensor(new[] { 1 }));
+            Assert.IsTrue(Equal(udImg.numpy().ToArray(), ud3.numpy().ToArray()), "gen_array_ops.reverse_v2 axis=1 fail.");
+
+            var ud2 = tf.reverse(sourceImg, new Axis(1));
+            Assert.IsTrue(Equal(udImg.numpy().ToArray(), ud2.numpy().ToArray()), "tf.reverse (axis=1) fail.");
+
+            var ud = tf.image.flip_up_down(sourceImg);
+            Assert.IsTrue(Equal(udImg.numpy().ToArray(), ud.numpy().ToArray()), "tf.image.flip_up_down fail.");
+
+            // Left-right flip
+            var lr = tf.image.flip_left_right(sourceImg);
+            Assert.IsTrue(Equal(lrImg.numpy().ToArray(), lr.numpy().ToArray()), "tf.image.flip_left_right fail.");
+
+            var lr2 = tf.reverse(sourceImg, 0);
+            Assert.IsTrue(Equal(lrImg.numpy().ToArray(), lr2.numpy().ToArray()), "tf.reverse (axis=0) fail.");
+
+            var lr3 = gen_array_ops.reverse_v2(sourceImg, ops.convert_to_tensor(new[] { 0 }));
+            Assert.IsTrue(Equal(lrImg.numpy().ToArray(), lr3.numpy().ToArray()), "gen_array_ops.reverse_v2 axis=0 fail.");
+
+        }
+
+        [TestMethod]
+        public void ReverseImgArray4D_3x3()
+        {
+            // The original image (distinct pixel in the top-left corner) plus a
+            // left-right flipped copy, stacked into a batch.
+            var m = new float[,,,] {
+                {
+                    { { 237, 28, 36 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } }
+                },
+                {
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 237, 28, 36 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } }
+                }
+            };
+            var sourceImg = ops.convert_to_tensor(m);
+
+            var lrArray = new float[,,,] {
+                {
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 237, 28, 36 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } }
+                },
+                {
+                    { { 237, 28, 36 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } }
+                }
+            };
+            var lrImg = ops.convert_to_tensor(lrArray);
+
+            // Create the expected up-down array.
+            var udArray = new float[,,,] {
+                {
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 237, 28, 36 }, { 255, 255, 255 }, { 255, 255, 255 } }
+                },
+                {
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 255, 255, 255 } },
+                    { { 255, 255, 255 }, { 255, 255, 255 }, { 237, 28, 36 } }
+                }
+            };
+            var udImg = ops.convert_to_tensor(udArray);
+
+            var ud3 = gen_array_ops.reverse_v2(sourceImg, ops.convert_to_tensor(new[] { 1 }));
+            Assert.IsTrue(Equal(udImg.numpy().ToArray(), ud3.numpy().ToArray()), "gen_array_ops.reverse_v2 axis=1 fail.");
+
+            var ud2 = tf.reverse(sourceImg, new Axis(1));
+            Assert.IsTrue(Equal(udImg.numpy().ToArray(), ud2.numpy().ToArray()), "tf.reverse (axis=1) fail.");
+
+            var ud = tf.image.flip_up_down(sourceImg);
+            Assert.IsTrue(Equal(udImg.numpy().ToArray(), ud.numpy().ToArray()), "tf.image.flip_up_down fail.");
+
+            // Left-right flip
+            var lr = tf.image.flip_left_right(sourceImg);
+            Assert.IsTrue(Equal(lrImg.numpy().ToArray(), lr.numpy().ToArray()), "tf.image.flip_left_right fail.");
+
+            var lr2 = tf.reverse(sourceImg, 0);
+            Assert.IsTrue(Equal(lrImg.numpy().ToArray(), lr2.numpy().ToArray()), "tf.reverse (axis=0) fail.");
+
+            var lr3 = gen_array_ops.reverse_v2(sourceImg, ops.convert_to_tensor(new[] { 0 }));
+            Assert.IsTrue(Equal(lrImg.numpy().ToArray(), lr3.numpy().ToArray()), "gen_array_ops.reverse_v2 axis=0 fail.");
+
+        }
     }
 }
diff --git a/test/TensorFlowNET.UnitTest/NumPy/ShapeTest.cs b/test/TensorFlowNET.UnitTest/NumPy/ShapeTest.cs
new file mode 100644
index 00000000..f5a8685b
--- /dev/null
+++ b/test/TensorFlowNET.UnitTest/NumPy/ShapeTest.cs
@@ -0,0 +1,44 @@
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using Tensorflow.NumPy;
+using System;
+using System.Linq;
+using static Tensorflow.Binding;
+using Tensorflow;
+
+namespace TensorFlowNET.UnitTest.NumPy
+{
+    [TestClass]
+    public class ShapeTest : EagerModeTestBase
+    {
+        [Ignore]
+        [TestMethod]
+        public unsafe void ShapeGetLastElements()
+        {
+            // Test code taken from the function _CheckAtLeast3DImage.
+            // The previous _CheckAtLeast3DImage had a bug; the code below is correct and now passes.
+            // TODO: the shape["-3:"] indexer currently has a bug and needs fixing. Re-enable this
+            // unit test once it is fixed; it is ignored for now.
+
+            var image_shape = new Shape(new[] { 32, 64, 3 });
+            var image_shape_4d = new Shape(new[] { 4, 64, 32, 3 });
+
+            var image_shape_last_three_elements = new Shape(new[] {
+                image_shape.dims[image_shape.dims.Length - 3],
+                image_shape.dims[image_shape.dims.Length - 2],
+                image_shape.dims[image_shape.dims.Length - 1]});
+
+            var image_shape_last_three_elements2 = image_shape["-3:"];
+
+            Assert.IsTrue(Equal(image_shape_last_three_elements.dims, image_shape_last_three_elements2.dims), "3dims get fail.");
+
+            var image_shape_last_three_elements_4d = new Shape(new[] {
+                image_shape_4d.dims[image_shape_4d.dims.Length - 3],
+                image_shape_4d.dims[image_shape_4d.dims.Length - 2],
+                image_shape_4d.dims[image_shape_4d.dims.Length - 1]});
+
+            var image_shape_last_three_elements2_4d = image_shape_4d["-3:"];
+
+            Assert.IsTrue(Equal(image_shape_last_three_elements_4d.dims, image_shape_last_three_elements2_4d.dims), "4dims get fail.");
+        }
+
+    }
+}
\ No newline at end of file
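Editor's note (not part of the patch): a minimal end-to-end sketch of the APIs this change touches, assuming eager execution; the file paths are placeholders, not files shipped with the repo. In graph mode the returned write_file operations must be run in a session, as ImageSaveTest does above.

using Tensorflow;
using static Tensorflow.Binding;

// Read and decode an image (any format decode_image supports).
var contents = tf.io.read_file("input.bmp");   // placeholder path
var img = tf.image.decode_image(contents);     // rank-3 tensor, H x W x C

// With this patch, flip_left_right reverses the width axis (index 1 of an
// H x W x C image) and flip_up_down reverses the height axis (index 0).
// For a rank-4 N x H x W x C batch, the flipped axis shifts by one accordingly.
var lr = tf.image.flip_left_right(img);
var ud = tf.image.flip_up_down(img);

// New helpers added by this patch: encode to PNG/JPEG and write to disk.
var writePng = tf.io.write_file("flipped_lr.png", tf.image.encode_png(lr));
var writeJpeg = tf.io.write_file("flipped_ud.jpeg", tf.image.encode_jpeg(ud));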