
test_iterator.py

# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
import pytest

import mindspore.dataset as ds
from mindspore.dataset.engine.iterators import ITERATORS_LIST, _cleanup

DATA_DIR = ["../data/dataset/testTFTestAllTypes/test.data"]
SCHEMA_DIR = "../data/dataset/testTFTestAllTypes/datasetSchema.json"
COLUMNS = ["col_1d", "col_2d", "col_3d", "col_binary", "col_float",
           "col_sint16", "col_sint32", "col_sint64"]


def check(project_columns):
    """Compare a projected tuple iterator against a dataset built directly with the same column list."""
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=COLUMNS, shuffle=False)
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=project_columns, shuffle=False)

    for data_actual, data_expected in zip(data1.create_tuple_iterator(project_columns),
                                          data2.create_tuple_iterator()):
        assert len(data_actual) == len(data_expected)
        assert all([np.array_equal(d1, d2) for d1, d2 in zip(data_actual, data_expected)])


def test_case_iterator():
    """
    Test creating tuple iterator
    """
    check(COLUMNS)
    check(COLUMNS[0:1])
    check(COLUMNS[0:2])
    check(COLUMNS[0:7])
    check(COLUMNS[7:8])
    check(COLUMNS[0:2:8])


def test_iterator_weak_ref():
    """
    Test that tuple iterators are tracked through weak references and released by _cleanup()
    """
    ITERATORS_LIST.clear()
    data = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR)
    itr1 = data.create_tuple_iterator()
    itr2 = data.create_tuple_iterator()
    itr3 = data.create_tuple_iterator()

    assert len(ITERATORS_LIST) == 3
    assert sum(itr() is not None for itr in ITERATORS_LIST) == 3

    # Deleting an iterator keeps its entry in the list, but the weak reference now resolves to None
    del itr1
    assert len(ITERATORS_LIST) == 3
    assert sum(itr() is not None for itr in ITERATORS_LIST) == 2

    del itr2
    assert len(ITERATORS_LIST) == 3
    assert sum(itr() is not None for itr in ITERATORS_LIST) == 1

    del itr3
    assert len(ITERATORS_LIST) == 3
    assert sum(itr() is not None for itr in ITERATORS_LIST) == 0

    itr1 = data.create_tuple_iterator()
    itr2 = data.create_tuple_iterator()
    itr3 = data.create_tuple_iterator()

    # After _cleanup() the underlying pipeline is released, so using an iterator fails
    _cleanup()
    with pytest.raises(AttributeError) as info:
        itr2.get_next()
    assert "object has no attribute 'depipeline'" in str(info.value)

    del itr1
    assert len(ITERATORS_LIST) == 6
    assert sum(itr() is not None for itr in ITERATORS_LIST) == 2

    _cleanup()
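
For readers unfamiliar with the pattern being tested, here is a minimal, self-contained sketch (plain standard-library Python, not MindSpore code) of how a weak-reference registry such as ITERATORS_LIST behaves. The names registry and _FakeIterator are illustrative only; the point is that entries remain in the list after the referent is garbage-collected, but the weakref.ref objects then resolve to None.

import weakref


class _FakeIterator:
    """Stand-in for a dataset iterator; illustrative only."""


registry = []  # plays the role that ITERATORS_LIST plays in the test above


def register(it):
    registry.append(weakref.ref(it))  # store a weak reference, not the object itself


it1 = _FakeIterator()
register(it1)
assert sum(r() is not None for r in registry) == 1

del it1                                              # drop the only strong reference
assert len(registry) == 1                            # the entry is still in the list...
assert sum(r() is not None for r in registry) == 0   # ...but it now dereferences to None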