# test_repeat.py
  1. # Copyright 2019 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ==============================================================================
  15. """
  16. Test Repeat Op
  17. """
  18. import numpy as np
  19. import mindspore.dataset as ds
  20. import mindspore.dataset.transforms.vision.c_transforms as vision
  21. from mindspore import log as logger
  22. from util import save_and_check_dict
# TFRecord data covering all schema column types; used for golden-file checks.
DATA_DIR_TF = ["../data/dataset/testTFTestAllTypes/test.data"]
SCHEMA_DIR_TF = "../data/dataset/testTFTestAllTypes/datasetSchema.json"
# Small image TFRecord dataset (3 rows — see test_repeat_count1's size assert),
# used by the decode/resize/batch tests.
DATA_DIR_TF2 = ["../data/dataset/test_tf_file_3_images/train-0000-of-0001.data"]
SCHEMA_DIR_TF2 = "../data/dataset/test_tf_file_3_images/datasetSchema.json"
# True regenerates the golden .npz files instead of comparing against them.
GENERATE_GOLDEN = False
  28. def test_tf_repeat_01():
  29. """
  30. Test a simple repeat operation.
  31. """
  32. logger.info("Test Simple Repeat")
  33. # define parameters
  34. repeat_count = 2
  35. # apply dataset operations
  36. data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
  37. data1 = data1.repeat(repeat_count)
  38. filename = "repeat_result.npz"
  39. save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)
  40. def test_tf_repeat_02():
  41. """
  42. Test Infinite Repeat.
  43. """
  44. logger.info("Test Infinite Repeat")
  45. # define parameters
  46. repeat_count = -1
  47. # apply dataset operations
  48. data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
  49. data1 = data1.repeat(repeat_count)
  50. itr = 0
  51. for _ in data1:
  52. itr = itr + 1
  53. if itr == 100:
  54. break
  55. assert itr == 100
  56. def test_tf_repeat_03():
  57. """
  58. Test Repeat then Batch.
  59. """
  60. logger.info("Test Repeat then Batch")
  61. data1 = ds.TFRecordDataset(DATA_DIR_TF2, SCHEMA_DIR_TF2, shuffle=False)
  62. batch_size = 32
  63. resize_height, resize_width = 32, 32
  64. decode_op = vision.Decode()
  65. resize_op = vision.Resize((resize_height, resize_width), interpolation=ds.transforms.vision.Inter.LINEAR)
  66. data1 = data1.map(input_columns=["image"], operations=decode_op)
  67. data1 = data1.map(input_columns=["image"], operations=resize_op)
  68. data1 = data1.repeat(22)
  69. data1 = data1.batch(batch_size, drop_remainder=True)
  70. num_iter = 0
  71. for _ in data1.create_dict_iterator():
  72. num_iter += 1
  73. logger.info("Number of tf data in data1: {}".format(num_iter))
  74. assert num_iter == 2
  75. def test_tf_repeat_04():
  76. """
  77. Test a simple repeat operation with column list.
  78. """
  79. logger.info("Test Simple Repeat Column List")
  80. # define parameters
  81. repeat_count = 2
  82. columns_list = ["col_sint64", "col_sint32"]
  83. # apply dataset operations
  84. data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, columns_list=columns_list, shuffle=False)
  85. data1 = data1.repeat(repeat_count)
  86. filename = "repeat_list_result.npz"
  87. save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)
  88. def generator():
  89. for i in range(3):
  90. (yield np.array([i]),)
  91. def test_nested_repeat1():
  92. logger.info("test_nested_repeat1")
  93. data = ds.GeneratorDataset(generator, ["data"])
  94. data = data.repeat(2)
  95. data = data.repeat(3)
  96. for i, d in enumerate(data):
  97. assert i % 3 == d[0][0]
  98. assert sum([1 for _ in data]) == 2 * 3 * 3
  99. def test_nested_repeat2():
  100. logger.info("test_nested_repeat2")
  101. data = ds.GeneratorDataset(generator, ["data"])
  102. data = data.repeat(1)
  103. data = data.repeat(1)
  104. for i, d in enumerate(data):
  105. assert i % 3 == d[0][0]
  106. assert sum([1 for _ in data]) == 3
  107. def test_nested_repeat3():
  108. logger.info("test_nested_repeat3")
  109. data = ds.GeneratorDataset(generator, ["data"])
  110. data = data.repeat(1)
  111. data = data.repeat(2)
  112. for i, d in enumerate(data):
  113. assert i % 3 == d[0][0]
  114. assert sum([1 for _ in data]) == 2 * 3
  115. def test_nested_repeat4():
  116. logger.info("test_nested_repeat4")
  117. data = ds.GeneratorDataset(generator, ["data"])
  118. data = data.repeat(2)
  119. data = data.repeat(1)
  120. for i, d in enumerate(data):
  121. assert i % 3 == d[0][0]
  122. assert sum([1 for _ in data]) == 2 * 3
  123. def test_nested_repeat5():
  124. logger.info("test_nested_repeat5")
  125. data = ds.GeneratorDataset(generator, ["data"])
  126. data = data.batch(3)
  127. data = data.repeat(2)
  128. data = data.repeat(3)
  129. for _, d in enumerate(data):
  130. assert np.array_equal(d[0], np.asarray([[0], [1], [2]]))
  131. assert sum([1 for _ in data]) == 6
  132. def test_nested_repeat6():
  133. logger.info("test_nested_repeat6")
  134. data = ds.GeneratorDataset(generator, ["data"])
  135. data = data.repeat(2)
  136. data = data.batch(3)
  137. data = data.repeat(3)
  138. for _, d in enumerate(data):
  139. assert np.array_equal(d[0], np.asarray([[0], [1], [2]]))
  140. assert sum([1 for _ in data]) == 6
  141. def test_nested_repeat7():
  142. logger.info("test_nested_repeat7")
  143. data = ds.GeneratorDataset(generator, ["data"])
  144. data = data.repeat(2)
  145. data = data.repeat(3)
  146. data = data.batch(3)
  147. for _, d in enumerate(data):
  148. assert np.array_equal(d[0], np.asarray([[0], [1], [2]]))
  149. assert sum([1 for _ in data]) == 6
  150. def test_nested_repeat8():
  151. logger.info("test_nested_repeat8")
  152. data = ds.GeneratorDataset(generator, ["data"])
  153. data = data.batch(2, drop_remainder=False)
  154. data = data.repeat(2)
  155. data = data.repeat(3)
  156. for i, d in enumerate(data):
  157. if i % 2 == 0:
  158. assert np.array_equal(d[0], np.asarray([[0], [1]]))
  159. else:
  160. assert np.array_equal(d[0], np.asarray([[2]]))
  161. assert sum([1 for _ in data]) == 6 * 2
  162. def test_nested_repeat9():
  163. logger.info("test_nested_repeat9")
  164. data = ds.GeneratorDataset(generator, ["data"])
  165. data = data.repeat()
  166. data = data.repeat(3)
  167. for i, d in enumerate(data):
  168. assert i % 3 == d[0][0]
  169. if i == 10:
  170. break
  171. def test_nested_repeat10():
  172. logger.info("test_nested_repeat10")
  173. data = ds.GeneratorDataset(generator, ["data"])
  174. data = data.repeat(3)
  175. data = data.repeat()
  176. for i, d in enumerate(data):
  177. assert i % 3 == d[0][0]
  178. if i == 10:
  179. break
  180. def test_nested_repeat11():
  181. logger.info("test_nested_repeat11")
  182. data = ds.GeneratorDataset(generator, ["data"])
  183. data = data.repeat(2)
  184. data = data.repeat(3)
  185. data = data.repeat(4)
  186. data = data.repeat(5)
  187. for i, d in enumerate(data):
  188. assert i % 3 == d[0][0]
  189. assert sum([1 for _ in data]) == 2 * 3 * 4 * 5 * 3
  190. def test_repeat_count1():
  191. data1 = ds.TFRecordDataset(DATA_DIR_TF2, SCHEMA_DIR_TF2, shuffle=False)
  192. data1_size = data1.get_dataset_size()
  193. logger.info("dataset size is {}".format(data1_size))
  194. batch_size = 2
  195. repeat_count = 4
  196. resize_height, resize_width = 32, 32
  197. decode_op = vision.Decode()
  198. resize_op = vision.Resize((resize_height, resize_width), interpolation=ds.transforms.vision.Inter.LINEAR)
  199. data1 = data1.map(input_columns=["image"], operations=decode_op)
  200. data1 = data1.map(input_columns=["image"], operations=resize_op)
  201. data1 = data1.repeat(repeat_count)
  202. data1 = data1.batch(batch_size, drop_remainder=False)
  203. dataset_size = data1.get_dataset_size()
  204. logger.info("dataset repeat then batch's size is {}".format(dataset_size))
  205. num1_iter = 0
  206. for _ in data1.create_dict_iterator():
  207. num1_iter += 1
  208. assert data1_size == 3
  209. assert dataset_size == num1_iter == 6
  210. def test_repeat_count2():
  211. data1 = ds.TFRecordDataset(DATA_DIR_TF2, SCHEMA_DIR_TF2, shuffle=False)
  212. data1_size = data1.get_dataset_size()
  213. logger.info("dataset size is {}".format(data1_size))
  214. batch_size = 2
  215. repeat_count = 4
  216. resize_height, resize_width = 32, 32
  217. decode_op = vision.Decode()
  218. resize_op = vision.Resize((resize_height, resize_width), interpolation=ds.transforms.vision.Inter.LINEAR)
  219. data1 = data1.map(input_columns=["image"], operations=decode_op)
  220. data1 = data1.map(input_columns=["image"], operations=resize_op)
  221. data1 = data1.batch(batch_size, drop_remainder=False)
  222. data1 = data1.repeat(repeat_count)
  223. dataset_size = data1.get_dataset_size()
  224. logger.info("dataset batch then repeat's size is {}".format(dataset_size))
  225. num1_iter = 0
  226. for _ in data1.create_dict_iterator():
  227. num1_iter += 1
  228. assert data1_size == 3
  229. assert dataset_size == num1_iter == 8
  230. if __name__ == "__main__":
  231. test_tf_repeat_01()
  232. test_tf_repeat_02()
  233. test_tf_repeat_03()
  234. test_tf_repeat_04()
  235. test_nested_repeat1()
  236. test_nested_repeat2()
  237. test_nested_repeat3()
  238. test_nested_repeat4()
  239. test_nested_repeat5()
  240. test_nested_repeat6()
  241. test_nested_repeat7()
  242. test_nested_repeat8()
  243. test_nested_repeat9()
  244. test_nested_repeat10()
  245. test_nested_repeat11()
  246. test_repeat_count1()
  247. test_repeat_count2()