test_parquet_data_node.py

# Copyright 2021-2024 Avaiga Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import os
import pathlib
import uuid
from datetime import datetime, timedelta
from importlib import util
from time import sleep

import freezegun
import numpy as np
import pandas as pd
import pytest
from pandas.testing import assert_frame_equal

from taipy.common.config import Config
from taipy.common.config.common.scope import Scope
from taipy.common.config.exceptions.exceptions import InvalidConfigurationId
from taipy.core.data._data_manager import _DataManager
from taipy.core.data._data_manager_factory import _DataManagerFactory
from taipy.core.data.data_node_id import DataNodeId
from taipy.core.data.parquet import ParquetDataNode
from taipy.core.exceptions.exceptions import (
    InvalidExposedType,
    UnknownCompressionAlgorithm,
    UnknownParquetEngine,
)
from taipy.core.reason import NoFileToDownload, NotAFile
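

# Runs after every test: remove the temporary parquet file that some tests
# write under data_sample/, so each test starts from a clean state.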
@pytest.fixture(scope="function", autouse=True)
def cleanup():
    yield
    path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/temp.parquet")
    if os.path.isfile(path):
        os.remove(path)
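

# Simple helper classes used as custom exposed types in the tests below.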
class MyCustomObject:
    def __init__(self, id, integer, text):
        self.id = id
        self.integer = integer
        self.text = text


class MyOtherCustomObject:
    def __init__(self, id, sentence):
        self.id = id
        self.sentence = sentence


def create_custom_class(**kwargs):
    return MyOtherCustomObject(id=kwargs["id"], sentence=kwargs["text"])
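

# The pyarrow engine is always exercised; fastparquet is added to the tested
# engines only when it is installed in the environment.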
class TestParquetDataNode:
    __engine = ["pyarrow"]
    if util.find_spec("fastparquet"):
        __engine.append("fastparquet")

    def test_create(self):
        path = "data/node/path"
        compression = "snappy"
        parquet_dn_config = Config.configure_parquet_data_node(
            id="foo_bar", default_path=path, compression=compression, name="super name"
        )
        dn = _DataManagerFactory._build_manager()._create_and_set(parquet_dn_config, None, None)
        assert isinstance(dn, ParquetDataNode)
        assert dn.storage_type() == "parquet"
        assert dn.config_id == "foo_bar"
        assert dn.name == "super name"
        assert dn.scope == Scope.SCENARIO
        assert dn.id is not None
        assert dn.owner_id is None
        assert dn.last_edit_date is None
        assert dn.job_ids == []
        assert not dn.is_ready_for_reading
        assert dn.path == path
        assert dn.properties["exposed_type"] == "pandas"
        assert dn.properties["compression"] == "snappy"
        assert dn.properties["engine"] == "pyarrow"

        parquet_dn_config_1 = Config.configure_parquet_data_node(
            id="bar", default_path=path, compression=compression, exposed_type=MyCustomObject
        )
        dn_1 = _DataManagerFactory._build_manager()._create_and_set(parquet_dn_config_1, None, None)
        assert isinstance(dn_1, ParquetDataNode)
        assert dn_1.properties["exposed_type"] == MyCustomObject

        parquet_dn_config_2 = Config.configure_parquet_data_node(
            id="bar", default_path=path, compression=compression, exposed_type=np.ndarray
        )
        dn_2 = _DataManagerFactory._build_manager()._create_and_set(parquet_dn_config_2, None, None)
        assert isinstance(dn_2, ParquetDataNode)
        assert dn_2.properties["exposed_type"] == np.ndarray

        parquet_dn_config_3 = Config.configure_parquet_data_node(
            id="bar", default_path=path, compression=compression, exposed_type=pd.DataFrame
        )
        dn_3 = _DataManagerFactory._build_manager()._create_and_set(parquet_dn_config_3, None, None)
        assert isinstance(dn_3, ParquetDataNode)
        assert dn_3.properties["exposed_type"] == pd.DataFrame

        with pytest.raises(InvalidConfigurationId):
            dn = ParquetDataNode("foo bar", Scope.SCENARIO, properties={"path": path, "name": "super name"})

    def test_get_user_properties(self, parquet_file_path):
        dn_1 = ParquetDataNode("dn_1", Scope.SCENARIO, properties={"path": parquet_file_path})
        assert dn_1._get_user_properties() == {}

        dn_2 = ParquetDataNode(
            "dn_2",
            Scope.SCENARIO,
            properties={
                "exposed_type": "numpy",
                "default_data": "foo",
                "default_path": parquet_file_path,
                "engine": "pyarrow",
                "compression": "snappy",
                "read_kwargs": {"columns": ["a", "b"]},
                "write_kwargs": {"index": False},
                "foo": "bar",
            },
        )

        # exposed_type, default_data, default_path, path, engine, compression,
        # read_kwargs, and write_kwargs are filtered out
        assert dn_2._get_user_properties() == {"foo": "bar"}

    def test_new_parquet_data_node_with_existing_file_is_ready_for_reading(self, parquet_file_path):
        not_ready_dn_cfg = Config.configure_data_node(
            "not_ready_data_node_config_id", "parquet", path="NOT_EXISTING.parquet"
        )
        not_ready_dn = _DataManager._bulk_get_or_create([not_ready_dn_cfg])[not_ready_dn_cfg]
        assert not not_ready_dn.is_ready_for_reading

        ready_dn_cfg = Config.configure_data_node("ready_data_node_config_id", "parquet", path=parquet_file_path)
        ready_dn = _DataManager._bulk_get_or_create([ready_dn_cfg])[ready_dn_cfg]
        assert ready_dn.is_ready_for_reading
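
    # Without a configured path, the file defaults to
    # <storage_folder>/parquets/<data node id>.parquet; it exists on disk only
    # once default data has been written.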
    @pytest.mark.parametrize(
        ["properties", "exists"],
        [
            ({}, False),
            ({"default_data": {"a": ["foo", "bar"]}}, True),
        ],
    )
    def test_create_with_default_data(self, properties, exists):
        dn = ParquetDataNode("foo", Scope.SCENARIO, DataNodeId(f"dn_id_{uuid.uuid4()}"), properties=properties)
        assert dn.path == os.path.join(Config.core.storage_folder.strip("/"), "parquets", dn.id + ".parquet")
        assert os.path.exists(dn.path) is exists

    @pytest.mark.parametrize("engine", __engine)
    def test_modin_deprecated_in_favor_of_pandas(self, engine, parquet_file_path):
        # Create ParquetDataNode with modin exposed_type
        props = {"path": parquet_file_path, "exposed_type": "modin", "engine": engine}
        parquet_data_node_as_modin = ParquetDataNode("bar", Scope.SCENARIO, properties=props)
        assert parquet_data_node_as_modin.properties["exposed_type"] == "pandas"
        data_modin = parquet_data_node_as_modin.read()
        assert isinstance(data_modin, pd.DataFrame)

    def test_set_path(self):
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": "foo.parquet"})
        assert dn.path == "foo.parquet"
        dn.path = "bar.parquet"
        assert dn.path == "bar.parquet"
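
    # An unknown engine, an unknown compression algorithm, or an invalid
    # exposed type should each raise a dedicated exception at construction.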
    def test_raise_error_unknown_parquet_engine(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        with pytest.raises(UnknownParquetEngine):
            ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "engine": "foo"})

    def test_raise_error_unknown_compression_algorithm(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        with pytest.raises(UnknownCompressionAlgorithm):
            ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "compression": "foo"})

    def test_raise_error_invalid_exposed_type(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        with pytest.raises(InvalidExposedType):
            ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "foo"})
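
    # When the underlying file or folder is modified outside of taipy (here,
    # written directly with pandas), last_edit_date should reflect the file
    # system modification time rather than the last taipy write.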
    def test_get_system_file_modified_date_instead_of_last_edit_date(self, tmpdir_factory):
        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
        pd.DataFrame([]).to_parquet(temp_file_path)
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": "pandas"})

        dn.write(pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]}))
        previous_edit_date = dn.last_edit_date

        sleep(0.1)
        pd.DataFrame(data={"col1": [5, 6], "col2": [7, 8]}).to_parquet(temp_file_path)
        new_edit_date = datetime.fromtimestamp(os.path.getmtime(temp_file_path))
        assert previous_edit_date < dn.last_edit_date
        assert new_edit_date == dn.last_edit_date

        sleep(0.1)
        dn.write(pd.DataFrame(data={"col1": [9, 10], "col2": [10, 12]}))
        assert new_edit_date < dn.last_edit_date
        os.unlink(temp_file_path)

    def test_get_system_folder_modified_date_instead_of_last_edit_date(self, tmpdir_factory):
        temp_folder_path = tmpdir_factory.mktemp("data").strpath
        temp_file_path = os.path.join(temp_folder_path, "temp.parquet")
        pd.DataFrame([]).to_parquet(temp_file_path)

        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_folder_path})
        initial_edit_date = dn.last_edit_date

        # Sleep so that the file can be created successfully on Ubuntu
        sleep(0.1)
        pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]}).to_parquet(temp_file_path)
        first_edit_date = datetime.fromtimestamp(os.path.getmtime(temp_file_path))
        assert dn.last_edit_date > initial_edit_date
        assert dn.last_edit_date == first_edit_date

        sleep(0.1)
        pd.DataFrame(data={"col1": [5, 6], "col2": [7, 8]}).to_parquet(temp_file_path)
        second_edit_date = datetime.fromtimestamp(os.path.getmtime(temp_file_path))
        assert dn.last_edit_date > first_edit_date
        assert dn.last_edit_date == second_edit_date
        os.unlink(temp_file_path)
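
    # A data node created on the legacy ".data" folder should have its file
    # migrated to the current storage location.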
    def test_migrate_to_new_path(self, tmp_path):
        _base_path = os.path.join(tmp_path, ".data")
        path = os.path.join(_base_path, "test.parquet")
        # create a file on old path
        os.mkdir(_base_path)
        with open(path, "w"):
            pass

        dn = ParquetDataNode("foo_bar", Scope.SCENARIO, properties={"path": path, "name": "super name"})

        assert ".data" not in dn.path
        assert os.path.exists(dn.path)
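
    # is_downloadable() is truthy for an existing regular file; otherwise the
    # returned object carries the reasons why the download is not possible.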
    def test_is_downloadable(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
        reasons = dn.is_downloadable()
        assert reasons
        assert reasons.reasons == ""

    def test_is_not_downloadable_no_file(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/wrong_path.parquet")
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
        reasons = dn.is_downloadable()
        assert not reasons
        assert len(reasons._reasons) == 1
        assert str(NoFileToDownload(path, dn.id)) in reasons.reasons

    def test_is_not_downloadable_not_a_file(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample")
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
        reasons = dn.is_downloadable()
        assert not reasons
        assert len(reasons._reasons) == 1
        assert str(NotAFile(path, dn.id)) in reasons.reasons

    def test_get_downloadable_path(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
        assert dn._get_downloadable_path() == path

    def test_get_downloadable_path_with_not_existing_file(self):
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": "NOT_EXISTING.parquet"})
        assert dn._get_downloadable_path() == ""

    def test_get_downloadable_path_as_directory_should_return_nothing(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/parquet_example")
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path})
        assert dn._get_downloadable_path() == ""
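
    # _upload() replaces the content of the data node with the uploaded file
    # while leaving the configured path unchanged.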
    def test_upload(self, parquet_file_path, tmpdir_factory):
        old_parquet_path = tmpdir_factory.mktemp("data").join("df.parquet").strpath
        old_data = pd.DataFrame([{"a": 0, "b": 1, "c": 2}, {"a": 3, "b": 4, "c": 5}])

        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": old_parquet_path, "exposed_type": "pandas"})
        dn.write(old_data)
        old_last_edit_date = dn.last_edit_date

        upload_content = pd.read_parquet(parquet_file_path)

        with freezegun.freeze_time(old_last_edit_date + timedelta(seconds=1)):
            dn._upload(parquet_file_path)

        assert_frame_equal(dn.read(), upload_content)  # The content of the dn should change to the uploaded content
        assert dn.last_edit_date > old_last_edit_date
        assert dn.path == old_parquet_path  # The path of the dn should not change
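
    # The upload_checker callback receives the uploaded file's name and its
    # parsed content, and must return True for the upload to be accepted.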
    def test_upload_with_upload_check_pandas(self, parquet_file_path, tmpdir_factory):
        old_parquet_path = tmpdir_factory.mktemp("data").join("df.parquet").strpath
        old_data = pd.DataFrame([{"a": 0, "b": 1, "c": 2}, {"a": 3, "b": 4, "c": 5}])
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": old_parquet_path, "exposed_type": "pandas"})
        dn.write(old_data)
        old_last_edit_date = dn.last_edit_date

        def check_data_column(upload_path, upload_data):
            return upload_path.endswith(".parquet") and upload_data.columns.tolist() == ["a", "b", "c"]

        not_exists_parquet_path = tmpdir_factory.mktemp("data").join("not_exists.parquet").strpath
        reasons = dn._upload(not_exists_parquet_path, upload_checker=check_data_column)
        assert bool(reasons) is False
        assert (
            str(list(reasons._reasons[dn.id])[0]) == "The uploaded file not_exists.parquet can not be read,"
            f' therefore is not a valid data file for data node "{dn.id}"'
        )

        not_parquet_path = tmpdir_factory.mktemp("data").join("wrong_format_df.not_parquet").strpath
        old_data.to_parquet(not_parquet_path, index=False)
        # The upload should fail when the file is not a parquet
        reasons = dn._upload(not_parquet_path, upload_checker=check_data_column)
        assert bool(reasons) is False
        assert (
            str(list(reasons._reasons[dn.id])[0])
            == f'The uploaded file wrong_format_df.not_parquet has invalid data for data node "{dn.id}"'
        )

        wrong_format_parquet_path = tmpdir_factory.mktemp("data").join("wrong_format_df.parquet").strpath
        pd.DataFrame([{"a": 1, "b": 2, "d": 3}, {"a": 4, "b": 5, "d": 6}]).to_parquet(
            wrong_format_parquet_path, index=False
        )
        # The upload should fail when check_data_column() returns False
        reasons = dn._upload(wrong_format_parquet_path, upload_checker=check_data_column)
        assert bool(reasons) is False
        assert (
            str(list(reasons._reasons[dn.id])[0])
            == f'The uploaded file wrong_format_df.parquet has invalid data for data node "{dn.id}"'
        )

        assert_frame_equal(dn.read(), old_data)  # The content of the dn should not change when upload fails
        assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
        assert dn.path == old_parquet_path  # The path of the dn should not change

        # The upload should succeed when check_data_column() returns True
        assert dn._upload(parquet_file_path, upload_checker=check_data_column)

    def test_upload_with_upload_check_numpy(self, tmpdir_factory):
        old_parquet_path = tmpdir_factory.mktemp("data").join("df.parquet").strpath
        old_data = np.array([[1, 2, 3], [4, 5, 6]])

        new_parquet_path = tmpdir_factory.mktemp("data").join("new_upload_data.parquet").strpath
        new_data = np.array([[1, 2, 3], [4, 5, 6]])
        pd.DataFrame(new_data, columns=["a", "b", "c"]).to_parquet(new_parquet_path, index=False)

        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": old_parquet_path, "exposed_type": "numpy"})
        dn.write(old_data)
        old_last_edit_date = dn.last_edit_date

        def check_data_is_positive(upload_path, upload_data):
            return upload_path.endswith(".parquet") and np.all(upload_data > 0)

        not_exists_parquet_path = tmpdir_factory.mktemp("data").join("not_exists.parquet").strpath
        reasons = dn._upload(not_exists_parquet_path, upload_checker=check_data_is_positive)
        assert bool(reasons) is False
        assert (
            str(list(reasons._reasons[dn.id])[0]) == "The uploaded file not_exists.parquet can not be read,"
            f' therefore is not a valid data file for data node "{dn.id}"'
        )

        not_parquet_path = tmpdir_factory.mktemp("data").join("wrong_format_df.not_parquet").strpath
        pd.DataFrame(old_data, columns=["a", "b", "c"]).to_parquet(not_parquet_path, index=False)
        # The upload should fail when the file is not a parquet
        reasons = dn._upload(not_parquet_path, upload_checker=check_data_is_positive)
        assert (
            str(list(reasons._reasons[dn.id])[0])
            == f'The uploaded file wrong_format_df.not_parquet has invalid data for data node "{dn.id}"'
        )

        wrong_format_parquet_path = tmpdir_factory.mktemp("data").join("wrong_format_df.parquet").strpath
        pd.DataFrame(np.array([[-1, 2, 3], [-4, -5, -6]]), columns=["a", "b", "c"]).to_parquet(
            wrong_format_parquet_path, index=False
        )
        # The upload should fail when check_data_is_positive() returns False
        reasons = dn._upload(wrong_format_parquet_path, upload_checker=check_data_is_positive)
        assert (
            str(list(reasons._reasons[dn.id])[0])
            == f'The uploaded file wrong_format_df.parquet has invalid data for data node "{dn.id}"'
        )

        assert np.array_equal(dn.read(), old_data)  # The content of the dn should not change when upload fails
        assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
        assert dn.path == old_parquet_path  # The path of the dn should not change

        # The upload should succeed when check_data_is_positive() returns True
        assert dn._upload(new_parquet_path, upload_checker=check_data_is_positive)