test_batch.py
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import mindspore.dataset as ds
from mindspore import log as logger

from util import save_and_check_dict

# Note: Number of rows in test.data dataset: 12
DATA_DIR = ["../data/dataset/testTFTestAllTypes/test.data"]
GENERATE_GOLDEN = False
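# Expected batch counts in the tests below follow from the 12-row input:
# with drop_remainder=False, batching n rows yields ceil(n / batch_size)
# batches; with drop_remainder=True, it yields floor(n / batch_size).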

def test_batch_01():
    """
    Test batch: batch_size>1, drop_remainder=True, no remainder exists
    """
    logger.info("test_batch_01")
    # define parameters
    batch_size = 2
    drop_remainder = True

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    data1 = data1.batch(batch_size, drop_remainder)

    assert sum([1 for _ in data1]) == 6

    filename = "batch_01_result.npz"
    save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)


def test_batch_02():
    """
    Test batch: batch_size>1, drop_remainder=True, remainder exists
    """
    logger.info("test_batch_02")
    # define parameters
    batch_size = 5
    drop_remainder = True

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    data1 = data1.batch(batch_size, drop_remainder=drop_remainder)

    assert sum([1 for _ in data1]) == 2

    filename = "batch_02_result.npz"
    save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)


def test_batch_03():
    """
    Test batch: batch_size>1, drop_remainder=False, no remainder exists
    """
    logger.info("test_batch_03")
    # define parameters
    batch_size = 3
    drop_remainder = False

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    data1 = data1.batch(batch_size=batch_size, drop_remainder=drop_remainder)

    assert sum([1 for _ in data1]) == 4

    filename = "batch_03_result.npz"
    save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)


def test_batch_04():
    """
    Test batch: batch_size>1, drop_remainder=False, remainder exists
    """
    logger.info("test_batch_04")
    # define parameters
    batch_size = 7
    drop_remainder = False

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    data1 = data1.batch(batch_size, drop_remainder)

    assert sum([1 for _ in data1]) == 2

    filename = "batch_04_result.npz"
    save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)


def test_batch_05():
    """
    Test batch: batch_size=1 (minimum valid size), drop_remainder default
    """
    logger.info("test_batch_05")
    # define parameters
    batch_size = 1

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    data1 = data1.batch(batch_size)

    assert sum([1 for _ in data1]) == 12

    filename = "batch_05_result.npz"
    save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)

def test_batch_06():
    """
    Test batch: batch_size = number-of-rows-in-dataset, drop_remainder=False, reorder params
    """
    logger.info("test_batch_06")
    # define parameters
    batch_size = 12
    drop_remainder = False

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    data1 = data1.batch(drop_remainder=drop_remainder, batch_size=batch_size)

    assert sum([1 for _ in data1]) == 1

    filename = "batch_06_result.npz"
    save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)

def test_batch_07():
    """
    Test batch: num_parallel_workers>1, drop_remainder=False, reorder params
    """
    logger.info("test_batch_07")
    # define parameters
    batch_size = 4
    drop_remainder = False
    num_parallel_workers = 2

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    data1 = data1.batch(num_parallel_workers=num_parallel_workers, drop_remainder=drop_remainder,
                        batch_size=batch_size)

    assert sum([1 for _ in data1]) == 3

    filename = "batch_07_result.npz"
    save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)


def test_batch_08():
    """
    Test batch: num_parallel_workers=1, drop_remainder default
    """
    logger.info("test_batch_08")
    # define parameters
    batch_size = 6
    num_parallel_workers = 1

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    data1 = data1.batch(batch_size, num_parallel_workers=num_parallel_workers)

    assert sum([1 for _ in data1]) == 2

    filename = "batch_08_result.npz"
    save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)


def test_batch_09():
    """
    Test batch: batch_size > number-of-rows-in-dataset, drop_remainder=False
    """
    logger.info("test_batch_09")
    # define parameters
    batch_size = 13
    drop_remainder = False

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    data1 = data1.batch(batch_size, drop_remainder=drop_remainder)

    assert sum([1 for _ in data1]) == 1

    filename = "batch_09_result.npz"
    save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)


def test_batch_10():
    """
    Test batch: batch_size > number-of-rows-in-dataset, drop_remainder=True
    """
    logger.info("test_batch_10")
    # define parameters
    batch_size = 99
    drop_remainder = True

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    data1 = data1.batch(batch_size, drop_remainder=drop_remainder)

    assert sum([1 for _ in data1]) == 0

    filename = "batch_10_result.npz"
    save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)


def test_batch_11():
    """
    Test batch: batch_size=1 and dataset-size=1
    """
    logger.info("test_batch_11")
    # define parameters
    batch_size = 1

    # apply dataset operations
    # Use schema file with 1 row
    schema_file = "../data/dataset/testTFTestAllTypes/datasetSchema1Row.json"
    data1 = ds.TFRecordDataset(DATA_DIR, schema_file)
    data1 = data1.batch(batch_size)

    assert sum([1 for _ in data1]) == 1

    filename = "batch_11_result.npz"
    save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)

def test_batch_12():
    """
    Test batch: batch_size boolean value True, treated as valid value 1
    """
    logger.info("test_batch_12")
    # define parameters
    # Python bool is a subclass of int, so True is accepted as batch_size=1
    batch_size = True

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    data1 = data1.batch(batch_size=batch_size)

    assert sum([1 for _ in data1]) == 12

    filename = "batch_12_result.npz"
    save_and_check_dict(data1, filename, generate_golden=GENERATE_GOLDEN)
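
# The exception tests below pass invalid arguments to batch() and assert
# only that the raised error message names the offending parameter; if no
# exception is raised, the iteration simply completes without asserting.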

def test_batch_exception_01():
    """
    Test batch exception: num_parallel_workers=0
    """
    logger.info("test_batch_exception_01")
    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    try:
        data1 = data1.batch(batch_size=2, drop_remainder=True, num_parallel_workers=0)
        sum([1 for _ in data1])
    except Exception as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "num_parallel_workers" in str(e)


def test_batch_exception_02():
    """
    Test batch exception: num_parallel_workers<0
    """
    logger.info("test_batch_exception_02")
    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    try:
        data1 = data1.batch(3, drop_remainder=True, num_parallel_workers=-1)
        sum([1 for _ in data1])
    except Exception as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "num_parallel_workers" in str(e)


def test_batch_exception_03():
    """
    Test batch exception: batch_size=0
    """
    logger.info("test_batch_exception_03")
    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    try:
        data1 = data1.batch(batch_size=0)
        sum([1 for _ in data1])
    except Exception as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "batch_size" in str(e)


def test_batch_exception_04():
    """
    Test batch exception: batch_size<0
    """
    logger.info("test_batch_exception_04")
    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    try:
        data1 = data1.batch(batch_size=-1)
        sum([1 for _ in data1])
    except Exception as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "batch_size" in str(e)


def test_batch_exception_05():
    """
    Test batch exception: batch_size boolean value False, treated as invalid value 0
    """
    logger.info("test_batch_exception_05")
    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    try:
        data1 = data1.batch(batch_size=False)
        sum([1 for _ in data1])
    except Exception as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "batch_size" in str(e)

def test_batch_exception_07():
    """
    Test batch exception: drop_remainder wrong type
    """
    logger.info("test_batch_exception_07")
    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    try:
        data1 = data1.batch(3, drop_remainder=0)
        sum([1 for _ in data1])
    except Exception as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "drop_remainder" in str(e)


def test_batch_exception_08():
    """
    Test batch exception: num_parallel_workers wrong type
    """
    logger.info("test_batch_exception_08")
    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    try:
        data1 = data1.batch(3, drop_remainder=True, num_parallel_workers=False)
        sum([1 for _ in data1])
    except Exception as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "num_parallel_workers" in str(e)


def test_batch_exception_09():
    """
    Test batch exception: Missing mandatory batch_size
    """
    logger.info("test_batch_exception_09")
    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    try:
        data1 = data1.batch(drop_remainder=True, num_parallel_workers=4)
        sum([1 for _ in data1])
    except Exception as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "batch_size" in str(e)


def test_batch_exception_10():
    """
    Test batch exception: num_parallel_workers>>1
    """
    logger.info("test_batch_exception_10")
    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR, shuffle=ds.Shuffle.FILES)
    try:
        data1 = data1.batch(batch_size=4, num_parallel_workers=8192)
        sum([1 for _ in data1])
    except Exception as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "num_parallel_workers" in str(e)


def test_batch_exception_11():
    """
    Test batch exception: wrong input order, num_parallel_workers wrongly used as drop_remainder
    """
    logger.info("test_batch_exception_11")
    # define parameters
    batch_size = 6
    num_parallel_workers = 1

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR)
    try:
        data1 = data1.batch(batch_size, num_parallel_workers)
        sum([1 for _ in data1])
    except Exception as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "drop_remainder" in str(e)


def test_batch_exception_12():
    """
    Test batch exception: wrong input order, drop_remainder wrongly used as batch_size
    """
    logger.info("test_batch_exception_12")
    # define parameters
    batch_size = 1
    drop_remainder = True

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR)
    try:
        data1 = data1.batch(drop_remainder, batch_size)
        sum([1 for _ in data1])
    except Exception as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "drop_remainder" in str(e)


def test_batch_exception_13():
    """
    Test batch exception: invalid input parameter
    """
    logger.info("test_batch_exception_13")
    # define parameters
    batch_size = 4

    # apply dataset operations
    data1 = ds.TFRecordDataset(DATA_DIR)
    try:
        data1 = data1.batch(batch_size, shard_id=1)
        sum([1 for _ in data1])
    except Exception as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "shard_id" in str(e)

if __name__ == '__main__':
    test_batch_01()
    test_batch_02()
    test_batch_03()
    test_batch_04()
    test_batch_05()
    test_batch_06()
    test_batch_07()
    test_batch_08()
    test_batch_09()
    test_batch_10()
    test_batch_11()
    test_batch_12()
    test_batch_exception_01()
    test_batch_exception_02()
    test_batch_exception_03()
    test_batch_exception_04()
    test_batch_exception_05()
    test_batch_exception_07()
    test_batch_exception_08()
    test_batch_exception_09()
    test_batch_exception_10()
    test_batch_exception_11()
    test_batch_exception_12()
    test_batch_exception_13()
    logger.info('\n')