# Copyright 2021-2024 Avaiga Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import os
import pathlib
import uuid
from datetime import datetime
from importlib import util
from time import sleep

import pandas as pd
import pytest

from taipy.config.common.scope import Scope
from taipy.config.config import Config
from taipy.config.exceptions.exceptions import InvalidConfigurationId
from taipy.core.data._data_manager import _DataManager
from taipy.core.data._data_manager_factory import _DataManagerFactory
from taipy.core.data.data_node_id import DataNodeId
from taipy.core.data.parquet import ParquetDataNode
from taipy.core.exceptions.exceptions import (
    InvalidExposedType,
    UnknownCompressionAlgorithm,
    UnknownParquetEngine,
)


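# The `parquet_file_path` fixture used by several tests below is presumably provided
# by the suite's conftest.py. The autouse fixture here cleans up the temporary
# parquet file that some tests create under data_sample/.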
@pytest.fixture(scope="function", autouse=True)
def cleanup():
    yield
    path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/temp.parquet")
    if os.path.isfile(path):
        os.remove(path)


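# Simple user-defined classes, used as custom exposed types in the tests below.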
class MyCustomObject:
    def __init__(self, id, integer, text):
        self.id = id
        self.integer = integer
        self.text = text


class MyOtherCustomObject:
    def __init__(self, id, sentence):
        self.id = id
        self.sentence = sentence


def create_custom_class(**kwargs):
    return MyOtherCustomObject(id=kwargs["id"], sentence=kwargs["text"])


class TestParquetDataNode:
    __engine = ["pyarrow"]
    if util.find_spec("fastparquet"):
        __engine.append("fastparquet")

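    # Creating a data node from a config should propagate the config values and fall
    # back to the default exposed type ("pandas") and engine ("pyarrow").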
    def test_create(self):
        path = "data/node/path"
        compression = "snappy"
        parquet_dn_config = Config.configure_parquet_data_node(
            id="foo_bar", default_path=path, compression=compression, name="super name"
        )
        dn = _DataManagerFactory._build_manager()._create_and_set(parquet_dn_config, None, None)
        assert isinstance(dn, ParquetDataNode)
        assert dn.storage_type() == "parquet"
        assert dn.config_id == "foo_bar"
        assert dn.name == "super name"
        assert dn.scope == Scope.SCENARIO
        assert dn.id is not None
        assert dn.owner_id is None
        assert dn.last_edit_date is None
        assert dn.job_ids == []
        assert not dn.is_ready_for_reading
        assert dn.path == path
        assert dn.exposed_type == "pandas"
        assert dn.compression == "snappy"
        assert dn.engine == "pyarrow"

        parquet_dn_config_1 = Config.configure_parquet_data_node(
            id="bar", default_path=path, compression=compression, exposed_type=MyCustomObject
        )
        dn_1 = _DataManagerFactory._build_manager()._create_and_set(parquet_dn_config_1, None, None)
        assert isinstance(dn_1, ParquetDataNode)
        assert dn_1.exposed_type == MyCustomObject

        with pytest.raises(InvalidConfigurationId):
            ParquetDataNode("foo bar", Scope.SCENARIO, properties={"path": path, "name": "super name"})

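    # Only properties unknown to the data node itself count as user properties.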
    def test_get_user_properties(self, parquet_file_path):
        dn_1 = ParquetDataNode("dn_1", Scope.SCENARIO, properties={"path": parquet_file_path})
        assert dn_1._get_user_properties() == {}

        dn_2 = ParquetDataNode(
            "dn_2",
            Scope.SCENARIO,
            properties={
                "exposed_type": "numpy",
                "default_data": "foo",
                "default_path": parquet_file_path,
                "engine": "pyarrow",
                "compression": "snappy",
                "read_kwargs": {"columns": ["a", "b"]},
                "write_kwargs": {"index": False},
                "foo": "bar",
            },
        )

        # exposed_type, default_data, default_path, path, engine, compression,
        # read_kwargs, and write_kwargs are filtered out.
        assert dn_2._get_user_properties() == {"foo": "bar"}

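    # A parquet data node is ready for reading only once its target file exists.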
    def test_new_parquet_data_node_with_existing_file_is_ready_for_reading(self, parquet_file_path):
        not_ready_dn_cfg = Config.configure_data_node(
            "not_ready_data_node_config_id", "parquet", path="NOT_EXISTING.parquet"
        )
        not_ready_dn = _DataManager._bulk_get_or_create([not_ready_dn_cfg])[not_ready_dn_cfg]
        assert not not_ready_dn.is_ready_for_reading

        ready_dn_cfg = Config.configure_data_node("ready_data_node_config_id", "parquet", path=parquet_file_path)
        ready_dn = _DataManager._bulk_get_or_create([ready_dn_cfg])[ready_dn_cfg]
        assert ready_dn.is_ready_for_reading

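    # Without default data, no file is written at creation; with default data, the
    # file is written eagerly under the core storage folder.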
    @pytest.mark.parametrize(
        ["properties", "exists"],
        [
            ({}, False),
            ({"default_data": {"a": ["foo", "bar"]}}, True),
        ],
    )
    def test_create_with_default_data(self, properties, exists):
        dn = ParquetDataNode("foo", Scope.SCENARIO, DataNodeId(f"dn_id_{uuid.uuid4()}"), properties=properties)
        assert dn.path == os.path.join(Config.core.storage_folder.strip("/"), "parquets", dn.id + ".parquet")
        assert os.path.exists(dn.path) is exists

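    # The "modin" exposed type is deprecated and silently falls back to "pandas".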
    @pytest.mark.parametrize("engine", __engine)
    def test_modin_deprecated_in_favor_of_pandas(self, engine, parquet_file_path):
        # Create a ParquetDataNode with the modin exposed_type.
        props = {"path": parquet_file_path, "exposed_type": "modin", "engine": engine}
        parquet_data_node_as_modin = ParquetDataNode("bar", Scope.SCENARIO, properties=props)
        assert parquet_data_node_as_modin.properties["exposed_type"] == "pandas"
        data_modin = parquet_data_node_as_modin.read()
        assert isinstance(data_modin, pd.DataFrame)

    def test_set_path(self):
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": "foo.parquet"})
        assert dn.path == "foo.parquet"
        dn.path = "bar.parquet"
        assert dn.path == "bar.parquet"

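    # Unknown engine, compression, or exposed-type values raise dedicated exceptions.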
    def test_raise_error_unknown_parquet_engine(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        with pytest.raises(UnknownParquetEngine):
            ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "engine": "foo"})

    def test_raise_error_unknown_compression_algorithm(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        with pytest.raises(UnknownCompressionAlgorithm):
            ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "compression": "foo"})

    def test_raise_error_invalid_exposed_type(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        with pytest.raises(InvalidExposedType):
            ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "foo"})

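    # When the underlying file is modified outside of Taipy, last_edit_date should
    # follow the file system's modification time.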
    def test_get_system_file_modified_date_instead_of_last_edit_date(self, tmpdir_factory):
        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
        pd.DataFrame([]).to_parquet(temp_file_path)
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": "pandas"})

        dn.write(pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]}))
        previous_edit_date = dn.last_edit_date

        sleep(0.1)
        pd.DataFrame(data={"col1": [5, 6], "col2": [7, 8]}).to_parquet(temp_file_path)
        new_edit_date = datetime.fromtimestamp(os.path.getmtime(temp_file_path))
        assert previous_edit_date < dn.last_edit_date
        assert new_edit_date == dn.last_edit_date

        sleep(0.1)
        dn.write(pd.DataFrame(data={"col1": [9, 10], "col2": [10, 12]}))
        assert new_edit_date < dn.last_edit_date

        os.unlink(temp_file_path)

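    # Same check when the data node points to a folder containing parquet files.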
    def test_get_system_folder_modified_date_instead_of_last_edit_date(self, tmpdir_factory):
        temp_folder_path = tmpdir_factory.mktemp("data").strpath
        temp_file_path = os.path.join(temp_folder_path, "temp.parquet")
        pd.DataFrame([]).to_parquet(temp_file_path)
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_folder_path})
        initial_edit_date = dn.last_edit_date

        # Sleep so that the file can be created successfully on Ubuntu.
        sleep(0.1)
        pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]}).to_parquet(temp_file_path)
        first_edit_date = datetime.fromtimestamp(os.path.getmtime(temp_file_path))
        assert dn.last_edit_date > initial_edit_date
        assert dn.last_edit_date == first_edit_date

        sleep(0.1)
        pd.DataFrame(data={"col1": [5, 6], "col2": [7, 8]}).to_parquet(temp_file_path)
        second_edit_date = datetime.fromtimestamp(os.path.getmtime(temp_file_path))
        assert dn.last_edit_date > first_edit_date
        assert dn.last_edit_date == second_edit_date

        os.unlink(temp_file_path)

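    # A path under the legacy ".data" folder should be migrated to the new storage
    # location when the data node is instantiated.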
    def test_migrate_to_new_path(self, tmp_path):
        _base_path = os.path.join(tmp_path, ".data")
        path = os.path.join(_base_path, "test.parquet")
        # Create a file on the old path.
        os.mkdir(_base_path)
        with open(path, "w"):
            pass

        dn = ParquetDataNode("foo_bar", Scope.SCENARIO, properties={"path": path, "name": "super name"})

        assert ".data" not in dn.path
        assert os.path.exists(dn.path)