test_pickle_data_node.py

# Copyright 2021-2024 Avaiga Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import os
import pathlib
from datetime import datetime
from time import sleep

import pandas as pd
import pytest

from taipy.config.common.scope import Scope
from taipy.config.config import Config
from taipy.config.exceptions.exceptions import InvalidConfigurationId
from taipy.core.data._data_manager import _DataManager
from taipy.core.data.pickle import PickleDataNode
from taipy.core.exceptions.exceptions import NoData
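

# Remove the temporary pickle file written under data_sample/ after each test.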
@pytest.fixture(scope="function", autouse=True)
def cleanup():
    yield
    path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/temp.p")
    if os.path.isfile(path):
        os.remove(path)


class TestPickleDataNodeEntity:
    @pytest.fixture(scope="function", autouse=True)
    def remove_pickle_files(self):
        yield
        import glob

        for f in glob.glob("*.p"):
            os.remove(f)
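
    # Creating a data node with default_data should write the pickle file to the storage folder right away.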
    def test_create(self):
        dn = PickleDataNode("foobar_bazxyxea", Scope.SCENARIO, properties={"default_data": "Data"})
        assert os.path.isfile(Config.core.storage_folder + "pickles/" + dn.id + ".p")
        assert isinstance(dn, PickleDataNode)
        assert dn.storage_type() == "pickle"
        assert dn.config_id == "foobar_bazxyxea"
        assert dn.scope == Scope.SCENARIO
        assert dn.id is not None
        assert dn.name is None
        assert dn.owner_id is None
        assert dn.last_edit_date is not None
        assert dn.job_ids == []
        assert dn.is_ready_for_reading
        assert dn.read() == "Data"
        assert dn.last_edit_date is not None
        assert dn.job_ids == []

        with pytest.raises(InvalidConfigurationId):
            PickleDataNode("foobar bazxyxea", Scope.SCENARIO, properties={"default_data": "Data"})

    def test_get_user_properties(self, pickle_file_path):
        dn_1 = PickleDataNode("dn_1", Scope.SCENARIO, properties={"path": pickle_file_path})
        assert dn_1._get_user_properties() == {}

        dn_2 = PickleDataNode(
            "dn_2",
            Scope.SCENARIO,
            properties={
                "default_data": "foo",
                "default_path": pickle_file_path,
                "foo": "bar",
            },
        )

        # default_data, default_path, path, is_generated are filtered out
        assert dn_2._get_user_properties() == {"foo": "bar"}

    def test_new_pickle_data_node_with_existing_file_is_ready_for_reading(self):
        not_ready_dn_cfg = Config.configure_data_node("not_ready_data_node_config_id", "pickle", path="NOT_EXISTING.p")
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.p")
        ready_dn_cfg = Config.configure_data_node("ready_data_node_config_id", "pickle", path=path)

        dns = _DataManager._bulk_get_or_create([not_ready_dn_cfg, ready_dn_cfg])

        assert not dns[not_ready_dn_cfg].is_ready_for_reading
        assert dns[ready_dn_cfg].is_ready_for_reading

    def test_create_with_file_name(self):
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"default_data": "bar", "path": "foo.FILE.p"})
        assert os.path.isfile("foo.FILE.p")
        assert dn.read() == "bar"
        dn.write("qux")
        assert dn.read() == "qux"
        dn.write(1998)
        assert dn.read() == 1998

    def test_read_and_write(self):
        no_data_dn = PickleDataNode("foo", Scope.SCENARIO)
        with pytest.raises(NoData):
            assert no_data_dn.read() is None
            no_data_dn.read_or_raise()

        pickle_str = PickleDataNode("foo", Scope.SCENARIO, properties={"default_data": "bar"})
        assert isinstance(pickle_str.read(), str)
        assert pickle_str.read() == "bar"
        pickle_str.properties["default_data"] = "baz"  # this modifies the default data value but not the data itself
        assert pickle_str.read() == "bar"
        pickle_str.write("qux")
        assert pickle_str.read() == "qux"
        pickle_str.write(1998)
        assert pickle_str.read() == 1998
        assert isinstance(pickle_str.read(), int)

        pickle_int = PickleDataNode("foo", Scope.SCENARIO, properties={"default_data": 197})
        assert isinstance(pickle_int.read(), int)
        assert pickle_int.read() == 197

        pickle_dict = PickleDataNode(
            "foo", Scope.SCENARIO, properties={"default_data": {"bar": 12, "baz": "qux", "quux": [13]}}
        )
        assert isinstance(pickle_dict.read(), dict)
        assert pickle_dict.read() == {"bar": 12, "baz": "qux", "quux": [13]}
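
    # When both 'default_path' and 'path' are provided, 'path' should take precedence.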
    def test_path_overrides_default_path(self):
        dn = PickleDataNode(
            "foo",
            Scope.SCENARIO,
            properties={
                "default_data": "bar",
                "default_path": "foo.FILE.p",
                "path": "bar.FILE.p",
            },
        )
        assert dn.path == "bar.FILE.p"

    def test_set_path(self):
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"default_path": "foo.p"})
        assert dn.path == "foo.p"
        dn.path = "bar.p"
        assert dn.path == "bar.p"

    def test_is_generated(self):
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={})
        assert dn.is_generated
        dn.path = "bar.p"
        assert not dn.is_generated

    def test_read_write_after_modify_path(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.p")
        new_path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/temp.p")
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"default_path": path})
        read_data = dn.read()
        assert read_data is not None
        dn.path = new_path
        with pytest.raises(FileNotFoundError):
            dn.read()
        dn.write({"other": "stuff"})
        assert dn.read() == {"other": "stuff"}
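
    # last_edit_date should follow the file's modification time when the pickle file is changed outside of Taipy.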
    def test_get_system_modified_date_instead_of_last_edit_date(self, tmpdir_factory):
        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.pickle"))
        pd.DataFrame([]).to_pickle(temp_file_path)

        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": "pandas"})
        dn.write(pd.DataFrame([1, 2, 3]))
        previous_edit_date = dn.last_edit_date

        sleep(0.1)
        pd.DataFrame([4, 5, 6]).to_pickle(temp_file_path)
        new_edit_date = datetime.fromtimestamp(os.path.getmtime(temp_file_path))
        assert previous_edit_date < dn.last_edit_date
        assert new_edit_date == dn.last_edit_date

        sleep(0.1)
        dn.write(pd.DataFrame([7, 8, 9]))
        assert new_edit_date < dn.last_edit_date

        os.unlink(temp_file_path)
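
    # A data node pointing inside the legacy ".data" folder should be migrated to a path outside of it.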
    def test_migrate_to_new_path(self, tmp_path):
        _base_path = os.path.join(tmp_path, ".data")
        path = os.path.join(_base_path, "test.p")
        # create a file on old path
        os.mkdir(_base_path)
        with open(path, "w"):
            pass

        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"default_data": "bar", "path": path})

        assert ".data" not in dn.path.name
        assert os.path.exists(dn.path)