test_pickle_data_node.py

# Copyright 2021-2025 Avaiga Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import glob
import os
import pathlib
import pickle
import re
from datetime import datetime, timedelta
from time import sleep

import freezegun
import pandas as pd
import pytest
from pandas.testing import assert_frame_equal

from taipy import Scope
from taipy.common.config import Config
from taipy.common.config.exceptions.exceptions import InvalidConfigurationId
from taipy.core.common._utils import _normalize_path
from taipy.core.data._data_manager import _DataManager
from taipy.core.data._data_manager_factory import _DataManagerFactory
from taipy.core.data.pickle import PickleDataNode
from taipy.core.exceptions.exceptions import NoData
from taipy.core.reason import NoFileToDownload, NotAFile


@pytest.fixture(scope="function", autouse=True)
def cleanup():
    yield
    path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/temp.p")
    if os.path.isfile(path):
        os.remove(path)
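

# NOTE: These tests also rely on a `pickle_file_path` fixture defined elsewhere in
# the suite (e.g. its conftest.py, not shown here). Based on how it is used below,
# it is assumed to point to a pickle file holding a DataFrame with columns
# ["a", "b", "c"], roughly equivalent to this hypothetical sketch:
#
#     @pytest.fixture()
#     def pickle_file_path(tmp_path):
#         path = str(tmp_path / "example.p")
#         pd.DataFrame([{"a": 1, "b": 2, "c": 3}]).to_pickle(path)
#         return path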


class TestPickleDataNodeEntity:
    @pytest.fixture(scope="function", autouse=True)
    def remove_pickle_files(self):
        yield
        for f in glob.glob("*.p"):
            os.remove(f)

    def test_create_with_manager(self, pickle_file_path):
        pickle_dn_config = Config.configure_pickle_data_node(id="baz", default_path=pickle_file_path)
        pickle_dn = _DataManagerFactory._build_manager()._create(pickle_dn_config, None, None)
        assert isinstance(pickle_dn, PickleDataNode)

    def test_create(self):
        pickle_dn_config = Config.configure_pickle_data_node(
            id="foobar_bazxyxea", default_path="Data", default_data="Data"
        )
        dn = _DataManagerFactory._build_manager()._create(pickle_dn_config, None, None)
        assert isinstance(dn, PickleDataNode)
        assert dn.storage_type() == "pickle"
        assert dn.config_id == "foobar_bazxyxea"
        assert dn.scope == Scope.SCENARIO
        assert dn.id is not None
        assert dn.name is None
        assert dn.owner_id is None
        assert dn.last_edit_date is not None
        assert dn.job_ids == []
        assert dn.is_ready_for_reading
        assert dn.read() == "Data"
        assert dn.last_edit_date is not None
        assert dn.job_ids == []

        with pytest.raises(InvalidConfigurationId):
            PickleDataNode("foobar bazxyxea", Scope.SCENARIO, properties={"default_data": "Data"})

    def test_get_user_properties(self, pickle_file_path):
        dn_1 = PickleDataNode("dn_1", Scope.SCENARIO, properties={"path": pickle_file_path})
        assert dn_1._get_user_properties() == {}

        dn_2 = PickleDataNode(
            "dn_2",
            Scope.SCENARIO,
            properties={
                "default_data": "foo",
                "default_path": pickle_file_path,
                "foo": "bar",
            },
        )

        # default_data, default_path, path, is_generated are filtered out
        assert dn_2._get_user_properties() == {"foo": "bar"}

    def test_new_pickle_data_node_with_existing_file_is_ready_for_reading(self):
        not_ready_dn_cfg = Config.configure_data_node("not_ready_data_node_config_id", "pickle", path="NOT_EXISTING.p")
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.p")
        ready_dn_cfg = Config.configure_data_node("ready_data_node_config_id", "pickle", path=path)

        dns = _DataManager._bulk_get_or_create([not_ready_dn_cfg, ready_dn_cfg])

        assert not dns[not_ready_dn_cfg].is_ready_for_reading
        assert dns[ready_dn_cfg].is_ready_for_reading

    def test_create_with_file_name(self):
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"default_data": "bar", "path": "foo.FILE.p"})
        _DataManagerFactory._build_manager()._repository._save(dn)
        assert os.path.isfile("foo.FILE.p")
        assert dn.read() == "bar"
        dn.write("qux")
        assert dn.read() == "qux"
        dn.write(1998)
        assert dn.read() == 1998

    def test_read_and_write(self):
        no_data_dn = PickleDataNode("foo", Scope.SCENARIO)
        _DataManagerFactory._build_manager()._repository._save(no_data_dn)
        assert no_data_dn.read() is None
        with pytest.raises(NoData):
            _DataManagerFactory._build_manager()._read(no_data_dn)
        with pytest.raises(NoData):
            no_data_dn.read_or_raise()

        pickle_str = PickleDataNode("foo", Scope.SCENARIO, properties={"default_data": "bar"})
        _DataManagerFactory._build_manager()._repository._save(pickle_str)

        assert isinstance(pickle_str.read(), str)
        assert pickle_str.read() == "bar"
        pickle_str.properties["default_data"] = "baz"  # this modifies the default data value but not the data itself
        assert pickle_str.read() == "bar"
        pickle_str.write("qux")
        assert pickle_str.read() == "qux"
        pickle_str.write(1998)
        assert pickle_str.read() == 1998
        assert isinstance(pickle_str.read(), int)

        pickle_int = PickleDataNode("foo", Scope.SCENARIO, properties={"default_data": 197})
        assert isinstance(pickle_int.read(), int)
        assert pickle_int.read() == 197

        pickle_dict = PickleDataNode(
            "foo", Scope.SCENARIO, properties={"default_data": {"bar": 12, "baz": "qux", "quux": [13]}}
        )
        assert isinstance(pickle_dict.read(), dict)
        assert pickle_dict.read() == {"bar": 12, "baz": "qux", "quux": [13]}

    def test_path_overrides_default_path(self):
        dn = PickleDataNode(
            "foo",
            Scope.SCENARIO,
            properties={
                "default_data": "bar",
                "default_path": "foo.FILE.p",
                "path": "bar.FILE.p",
            },
        )
        _DataManagerFactory._build_manager()._repository._save(dn)
        assert dn.path == "bar.FILE.p"

    def test_set_path(self):
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"default_path": "foo.p"})
        _DataManagerFactory._build_manager()._repository._save(dn)
        assert dn.path == "foo.p"
        dn.path = "bar.p"
        assert dn.path == "bar.p"
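
    # A data node created without an explicit path gets an auto-generated pickle
    # file; once a path is set explicitly, the node is no longer considered generated.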
    def test_is_generated(self):
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={})
        _DataManagerFactory._build_manager()._repository._save(dn)
        assert dn.is_generated
        dn.path = "bar.p"
        assert not dn.is_generated

    def test_read_write_after_modify_path(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.p")
        new_path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/temp.p")
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"default_path": path})
        _DataManagerFactory._build_manager()._repository._save(dn)
        read_data = dn.read()
        assert read_data is not None
        dn.path = new_path
        with pytest.raises(FileNotFoundError):
            dn.read()
        dn.write({"other": "stuff"})
        assert dn.read() == {"other": "stuff"}
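
    # When the underlying file is modified outside of taipy, last_edit_date is
    # expected to reflect the file system's modification time rather than the
    # last write made through the data node.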
    def test_get_system_modified_date_instead_of_last_edit_date(self, tmpdir_factory):
        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.pickle"))
        pd.DataFrame([]).to_pickle(temp_file_path)
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": "pandas"})
        _DataManagerFactory._build_manager()._repository._save(dn)

        dn.write(pd.DataFrame([1, 2, 3]))
        previous_edit_date = dn.last_edit_date

        sleep(0.1)

        pd.DataFrame([4, 5, 6]).to_pickle(temp_file_path)
        new_edit_date = datetime.fromtimestamp(os.path.getmtime(temp_file_path))

        assert previous_edit_date < dn.last_edit_date
        assert new_edit_date == dn.last_edit_date

        sleep(0.1)

        dn.write(pd.DataFrame([7, 8, 9]))
        assert new_edit_date < dn.last_edit_date

        os.unlink(temp_file_path)
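
    # Data nodes whose file lives under the legacy ".data" storage folder are
    # expected to be migrated to the current storage location on creation.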
    def test_migrate_to_new_path(self, tmp_path):
        _base_path = os.path.join(tmp_path, ".data")
        path = os.path.join(_base_path, "test.p")
        # create a file on the old path
        os.mkdir(_base_path)
        with open(path, "w"):
            pass

        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"default_data": "bar", "path": path})

        assert ".data" not in dn.path
        assert os.path.exists(dn.path)
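
    # is_downloadable() returns a reason collection that is truthy when the file
    # can be downloaded, and lists the blocking reasons otherwise.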
    def test_is_downloadable(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.p")
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"path": path})
        reasons = dn.is_downloadable()
        assert reasons
        assert reasons.reasons == ""

    def test_is_not_downloadable_no_file(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/wrong_path.p")
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"path": path})
        reasons = dn.is_downloadable()
        assert not reasons
        assert len(reasons._reasons) == 1
        assert str(NoFileToDownload(_normalize_path(path), dn.id)) in reasons.reasons

    def test_is_not_downloadable_not_a_file(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample")
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"path": path})
        reasons = dn.is_downloadable()
        assert not reasons
        assert len(reasons._reasons) == 1
        assert str(NotAFile(_normalize_path(path), dn.id)) in reasons.reasons

    def test_get_download_path(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.p")
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"path": path})
        assert re.split(r"[\\/]", dn._get_downloadable_path()) == re.split(r"[\\/]", path)

    def test_get_download_path_with_not_existing_file(self):
        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"path": "NOT_EXISTED.p"})
        assert dn._get_downloadable_path() == ""
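
    # _upload() replaces the node's content with the uploaded file's content
    # while keeping the node's own path unchanged.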
    def test_upload(self, pickle_file_path, tmpdir_factory):
        old_pickle_path = tmpdir_factory.mktemp("data").join("df.p").strpath
        old_data = pd.DataFrame([{"a": 0, "b": 1, "c": 2}, {"a": 3, "b": 4, "c": 5}])

        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"path": old_pickle_path})
        _DataManagerFactory._build_manager()._repository._save(dn)
        dn.write(old_data)
        old_last_edit_date = dn.last_edit_date

        upload_content = pd.read_pickle(pickle_file_path)

        with freezegun.freeze_time(old_last_edit_date + timedelta(seconds=1)):
            dn._upload(pickle_file_path)

        assert_frame_equal(dn.read(), upload_content)  # The content of the dn should change to the uploaded content
        assert dn.last_edit_date > old_last_edit_date
        assert dn.path == _normalize_path(old_pickle_path)  # The path of the dn should not change
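
    # An upload_checker receives the uploaded file path and the data read from it,
    # and must return True for the upload to be accepted; when it rejects the file,
    # the node's content, last_edit_date, and path are left untouched.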
    def test_upload_with_upload_check(self, pickle_file_path, tmpdir_factory):
        old_pickle_path = tmpdir_factory.mktemp("data").join("df.p").strpath
        old_data = pd.DataFrame([{"a": 0, "b": 1, "c": 2}, {"a": 3, "b": 4, "c": 5}])

        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"path": old_pickle_path})
        _DataManagerFactory._build_manager()._repository._save(dn)
        dn.write(old_data)
        old_last_edit_date = dn.last_edit_date

        def check_data_column(upload_path, upload_data):
            return upload_path.endswith(".p") and upload_data.columns.tolist() == ["a", "b", "c"]

        # The upload should fail when the uploaded file does not exist
        not_exists_json_path = tmpdir_factory.mktemp("data").join("not_exists.json").strpath
        reasons = dn._upload(not_exists_json_path, upload_checker=check_data_column)
        assert bool(reasons) is False
        assert (
            str(list(reasons._reasons[dn.id])[0]) == "The uploaded file 'not_exists.json' can not be read,"
            f" therefore is not a valid data file for data node '{dn.id}'"
        )

        not_pickle_path = tmpdir_factory.mktemp("data").join("wrong_format_df.not_pickle").strpath
        with open(str(not_pickle_path), "wb") as f:
            pickle.dump(pd.DataFrame([{"a": 1, "b": 2, "d": 3}, {"a": 4, "b": 5, "d": 6}]), f)

        # The upload should fail when the file is rejected by the checker (not a .p file)
        reasons = dn._upload(not_pickle_path, upload_checker=check_data_column)
        assert bool(reasons) is False
        assert (
            str(list(reasons._reasons[dn.id])[0])
            == f"The uploaded file 'wrong_format_df.not_pickle' has invalid data for data node '{dn.id}'"
        )

        wrong_format_pickle_path = tmpdir_factory.mktemp("data").join("wrong_format_df.p").strpath
        with open(str(wrong_format_pickle_path), "wb") as f:
            pickle.dump(pd.DataFrame([{"a": 1, "b": 2, "d": 3}, {"a": 4, "b": 5, "d": 6}]), f)

        # The upload should fail when check_data_column() returns False
        reasons = dn._upload(wrong_format_pickle_path, upload_checker=check_data_column)
        assert bool(reasons) is False
        assert (
            str(list(reasons._reasons[dn.id])[0])
            == f"The uploaded file 'wrong_format_df.p' has invalid data for data node '{dn.id}'"
        )

        assert_frame_equal(dn.read(), old_data)  # The content of the dn should not change when upload fails
        assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
        assert dn.path == _normalize_path(old_pickle_path)  # The path of the dn should not change

        # The upload should succeed when check_data_column() returns True
        assert dn._upload(pickle_file_path, upload_checker=check_data_column)