
test_project.py
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import mindspore.dataset as ds
import mindspore.dataset.transforms.c_transforms as C
from mindspore.common import dtype as mstype

from util import save_and_check_tuple

DATA_DIR_TF = ["../data/dataset/testTFTestAllTypes/test.data"]
SCHEMA_DIR_TF = "../data/dataset/testTFTestAllTypes/datasetSchema.json"
GENERATE_GOLDEN = False


def test_case_project_single_column():
    """Project a single column from a TFRecordDataset and verify against the golden output."""
    columns = ["col_sint32"]
    parameters = {"params": {'columns': columns}}
    data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
    data1 = data1.project(columns=columns)
    filename = "project_single_column_result.npz"
    save_and_check_tuple(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)


def test_case_project_multiple_columns_in_order():
    """Project multiple columns listed in their schema order."""
    columns = ["col_sint16", "col_float", "col_2d"]
    parameters = {"params": {'columns': columns}}
    data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
    data1 = data1.project(columns=columns)
    filename = "project_multiple_columns_in_order_result.npz"
    save_and_check_tuple(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)


def test_case_project_multiple_columns_out_of_order():
    """Project multiple columns listed in an order different from the schema order."""
    columns = ["col_3d", "col_sint64", "col_2d"]
    parameters = {"params": {'columns': columns}}
    data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
    data1 = data1.project(columns=columns)
    filename = "project_multiple_columns_out_of_order_result.npz"
    save_and_check_tuple(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)


def test_case_project_map():
    """Apply project first, then a TypeCast map on one of the projected columns."""
    columns = ["col_3d", "col_sint64", "col_2d"]
    parameters = {"params": {'columns': columns}}
    data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
    data1 = data1.project(columns=columns)
    type_cast_op = C.TypeCast(mstype.int64)
    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
    filename = "project_map_after_result.npz"
    save_and_check_tuple(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)


def test_case_map_project():
    """Apply a TypeCast map first, then project down to a subset of columns."""
    columns = ["col_3d", "col_sint64", "col_2d"]
    parameters = {"params": {'columns': columns}}
    data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
    type_cast_op = C.TypeCast(mstype.int64)
    data1 = data1.map(input_columns=["col_sint64"], operations=type_cast_op)
    data1 = data1.project(columns=columns)
    filename = "project_map_before_result.npz"
    save_and_check_tuple(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)


def test_case_project_between_maps():
    """Insert a project between two runs of repeated TypeCast maps on the same column."""
    columns = ["col_3d", "col_sint64", "col_2d"]
    parameters = {"params": {'columns': columns}}
    data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
    type_cast_op = C.TypeCast(mstype.int64)
    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
    data1 = data1.project(columns=columns)
    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
    data1 = data1.map(input_columns=["col_3d"], operations=type_cast_op)
    filename = "project_between_maps_result.npz"
    save_and_check_tuple(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)


def test_case_project_repeat():
    """Apply project before repeat."""
    columns = ["col_3d", "col_sint64", "col_2d"]
    parameters = {"params": {'columns': columns}}
    data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
    data1 = data1.project(columns=columns)
    repeat_count = 3
    data1 = data1.repeat(repeat_count)
    filename = "project_before_repeat_result.npz"
    save_and_check_tuple(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)


def test_case_repeat_project():
    """Apply repeat before project."""
    columns = ["col_3d", "col_sint64", "col_2d"]
    parameters = {"params": {'columns': columns}}
    data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
    repeat_count = 3
    data1 = data1.repeat(repeat_count)
    data1 = data1.project(columns=columns)
    filename = "project_after_repeat_result.npz"
    save_and_check_tuple(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)


def test_case_map_project_map_project():
    """Alternate map and project twice in the same pipeline."""
    columns = ["col_3d", "col_sint64", "col_2d"]
    parameters = {"params": {'columns': columns}}
    data1 = ds.TFRecordDataset(DATA_DIR_TF, SCHEMA_DIR_TF, shuffle=False)
    type_cast_op = C.TypeCast(mstype.int64)
    data1 = data1.map(input_columns=["col_sint64"], operations=type_cast_op)
    data1 = data1.project(columns=columns)
    data1 = data1.map(input_columns=["col_2d"], operations=type_cast_op)
    data1 = data1.project(columns=columns)
    filename = "project_alternate_parallel_inline_result.npz"
    save_and_check_tuple(data1, parameters, filename, generate_golden=GENERATE_GOLDEN)
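

# Not part of the original file: a minimal runner sketch, assuming these cases are
# normally collected by pytest. Calling the functions directly like this is only
# illustrative; the golden .npz files must already exist since GENERATE_GOLDEN is False.
if __name__ == "__main__":
    test_case_project_single_column()
    test_case_project_multiple_columns_in_order()
    test_case_project_multiple_columns_out_of_order()
    test_case_project_map()
    test_case_map_project()
    test_case_project_between_maps()
    test_case_project_repeat()
    test_case_repeat_project()
    test_case_map_project_map_project()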