# Copyright 2021-2025 Avaiga Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import os
import pathlib
import re
import uuid
from datetime import datetime, timedelta
from importlib import util
from time import sleep

import freezegun
import numpy as np
import pandas as pd
import pytest
from pandas.testing import assert_frame_equal

from taipy import Scope
from taipy.common.config import Config
from taipy.common.config.exceptions.exceptions import InvalidConfigurationId
from taipy.core.common._utils import _normalize_path
from taipy.core.data._data_manager import _DataManager
from taipy.core.data._data_manager_factory import _DataManagerFactory
from taipy.core.data.data_node_id import DataNodeId
from taipy.core.data.parquet import ParquetDataNode
from taipy.core.exceptions.exceptions import (
    InvalidExposedType,
    UnknownCompressionAlgorithm,
    UnknownParquetEngine,
)
from taipy.core.reason import NoFileToDownload, NotAFile
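

# Remove the temporary Parquet file written by the tests after each test run.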
@pytest.fixture(scope="function", autouse=True)
def cleanup():
    yield
    path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/temp.parquet")
    if os.path.isfile(path):
        os.remove(path)
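

# Simple helper classes used as custom exposed types in the tests.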
class MyCustomObject:
    def __init__(self, id, integer, text):
        self.id = id
        self.integer = integer
        self.text = text


class MyOtherCustomObject:
    def __init__(self, id, sentence):
        self.id = id
        self.sentence = sentence


def create_custom_class(**kwargs):
    return MyOtherCustomObject(id=kwargs["id"], sentence=kwargs["text"])


class TestParquetDataNode:
    __engine = ["pyarrow"]
    if util.find_spec("fastparquet"):
        __engine.append("fastparquet")

    def test_create(self):
        path = "data/node/path"
        compression = "snappy"
        parquet_dn_config = Config.configure_parquet_data_node(
            id="foo_bar", default_path=path, compression=compression, name="super name"
        )
        dn = _DataManagerFactory._build_manager()._create_and_set(parquet_dn_config, None, None)
        assert isinstance(dn, ParquetDataNode)
        assert dn.storage_type() == "parquet"
        assert dn.config_id == "foo_bar"
        assert dn.name == "super name"
        assert dn.scope == Scope.SCENARIO
        assert dn.id is not None
        assert dn.owner_id is None
        assert dn.last_edit_date is None
        assert dn.job_ids == []
        assert not dn.is_ready_for_reading
        assert dn.path == path
        assert dn.properties["exposed_type"] == "pandas"
        assert dn.properties["compression"] == "snappy"
        assert dn.properties["engine"] == "pyarrow"

        parquet_dn_config_1 = Config.configure_parquet_data_node(
            id="bar", default_path=path, compression=compression, exposed_type=MyCustomObject
        )
        dn_1 = _DataManagerFactory._build_manager()._create_and_set(parquet_dn_config_1, None, None)
        assert isinstance(dn_1, ParquetDataNode)
        assert dn_1.properties["exposed_type"] == MyCustomObject

        parquet_dn_config_2 = Config.configure_parquet_data_node(
            id="bar", default_path=path, compression=compression, exposed_type=np.ndarray
        )
        dn_2 = _DataManagerFactory._build_manager()._create_and_set(parquet_dn_config_2, None, None)
        assert isinstance(dn_2, ParquetDataNode)
        assert dn_2.properties["exposed_type"] == np.ndarray

        parquet_dn_config_3 = Config.configure_parquet_data_node(
            id="bar", default_path=path, compression=compression, exposed_type=pd.DataFrame
        )
        dn_3 = _DataManagerFactory._build_manager()._create_and_set(parquet_dn_config_3, None, None)
        assert isinstance(dn_3, ParquetDataNode)
        assert dn_3.properties["exposed_type"] == pd.DataFrame

        with pytest.raises(InvalidConfigurationId):
            dn = ParquetDataNode("foo bar", Scope.SCENARIO, properties={"path": path, "name": "super name"})

    def test_get_user_properties(self, parquet_file_path):
        dn_1 = ParquetDataNode("dn_1", Scope.SCENARIO, properties={"path": parquet_file_path})
        assert dn_1._get_user_properties() == {}

        dn_2 = ParquetDataNode(
            "dn_2",
            Scope.SCENARIO,
            properties={
                "exposed_type": "numpy",
                "default_data": "foo",
                "default_path": parquet_file_path,
                "engine": "pyarrow",
                "compression": "snappy",
                "read_kwargs": {"columns": ["a", "b"]},
                "write_kwargs": {"index": False},
                "foo": "bar",
            },
        )

        # exposed_type, default_data, default_path, path, engine, compression, read_kwargs, write_kwargs
        # are filtered out
        assert dn_2._get_user_properties() == {"foo": "bar"}

    def test_new_parquet_data_node_with_existing_file_is_ready_for_reading(self, parquet_file_path):
        not_ready_dn_cfg = Config.configure_data_node(
            "not_ready_data_node_config_id", "parquet", path="NOT_EXISTING.parquet"
        )
        not_ready_dn = _DataManager._bulk_get_or_create([not_ready_dn_cfg])[not_ready_dn_cfg]
        assert not not_ready_dn.is_ready_for_reading

        ready_dn_cfg = Config.configure_data_node("ready_data_node_config_id", "parquet", path=parquet_file_path)
        ready_dn = _DataManager._bulk_get_or_create([ready_dn_cfg])[ready_dn_cfg]
        assert ready_dn.is_ready_for_reading
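
    # Without default data, the target Parquet file is not created yet; the path defaults to the storage folder.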
    @pytest.mark.parametrize(
        ["properties", "exists"],
        [
            ({}, False),
            ({"default_data": {"a": ["foo", "bar"]}}, True),
        ],
    )
    def test_create_with_default_data(self, properties, exists):
        dn = ParquetDataNode("foo", Scope.SCENARIO, DataNodeId(f"dn_id_{uuid.uuid4()}"), properties=properties)
        assert dn.path == f"{Config.core.storage_folder}parquets/{dn.id}.parquet"
        assert os.path.exists(dn.path) is exists
- @pytest.mark.parametrize("engine", __engine)
- def test_modin_deprecated_in_favor_of_pandas(self, engine, parquet_file_path):
- # Create ParquetDataNode with modin exposed_type
- props = {"path": parquet_file_path, "exposed_type": "modin", "engine": engine}
- parquet_data_node_as_modin = ParquetDataNode("bar", Scope.SCENARIO, properties=props)
- assert parquet_data_node_as_modin.properties["exposed_type"] == "pandas"
- data_modin = parquet_data_node_as_modin.read()
- assert isinstance(data_modin, pd.DataFrame)

    def test_set_path(self):
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": "foo.parquet"})
        assert dn.path == "foo.parquet"
        dn.path = "bar.parquet"
        assert dn.path == "bar.parquet"

    def test_raise_error_unknown_parquet_engine(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        with pytest.raises(UnknownParquetEngine):
            ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "engine": "foo"})

    def test_raise_error_unknown_compression_algorithm(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        with pytest.raises(UnknownCompressionAlgorithm):
            ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "compression": "foo"})

    def test_raise_error_invalid_exposed_type(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        with pytest.raises(InvalidExposedType):
            ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "foo"})
    def test_get_system_file_modified_date_instead_of_last_edit_date(self, tmpdir_factory):
        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
        pd.DataFrame([]).to_parquet(temp_file_path)
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": "pandas"})

        dn.write(pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]}))
        previous_edit_date = dn.last_edit_date

        sleep(0.1)
        pd.DataFrame(data={"col1": [5, 6], "col2": [7, 8]}).to_parquet(temp_file_path)
        new_edit_date = datetime.fromtimestamp(os.path.getmtime(temp_file_path))
        assert previous_edit_date < dn.last_edit_date
        assert new_edit_date == dn.last_edit_date

        sleep(0.1)
        dn.write(pd.DataFrame(data={"col1": [9, 10], "col2": [10, 12]}))
        assert new_edit_date < dn.last_edit_date
        os.unlink(temp_file_path)

    def test_get_system_folder_modified_date_instead_of_last_edit_date(self, tmpdir_factory):
        temp_folder_path = tmpdir_factory.mktemp("data").strpath
        temp_file_path = os.path.join(temp_folder_path, "temp.parquet")
        pd.DataFrame([]).to_parquet(temp_file_path)

        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_folder_path})
        initial_edit_date = dn.last_edit_date

        # Sleep so that the file can be created successfully on Ubuntu
        sleep(0.1)
        pd.DataFrame(data={"col1": [1, 2], "col2": [3, 4]}).to_parquet(temp_file_path)
        first_edit_date = datetime.fromtimestamp(os.path.getmtime(temp_file_path))
        assert dn.last_edit_date > initial_edit_date
        assert dn.last_edit_date == first_edit_date

        sleep(0.1)
        pd.DataFrame(data={"col1": [5, 6], "col2": [7, 8]}).to_parquet(temp_file_path)
        second_edit_date = datetime.fromtimestamp(os.path.getmtime(temp_file_path))
        assert dn.last_edit_date > first_edit_date
        assert dn.last_edit_date == second_edit_date

        os.unlink(temp_file_path)
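
    # A data node configured with a path under the legacy ".data" folder is expected to move its file
    # to the current storage location.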
    def test_migrate_to_new_path(self, tmp_path):
        _base_path = os.path.join(tmp_path, ".data")
        path = os.path.join(_base_path, "test.parquet")
        # Create a file on the old path
        os.mkdir(_base_path)
        with open(path, "w"):
            pass

        dn = ParquetDataNode("foo_bar", Scope.SCENARIO, properties={"path": path, "name": "super name"})

        assert ".data" not in dn.path
        assert os.path.exists(dn.path)
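
    # is_downloadable() is truthy only when the path points to an existing file; otherwise the returned
    # reasons explain why (NoFileToDownload, NotAFile).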
    def test_is_downloadable(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
        reasons = dn.is_downloadable()
        assert reasons
        assert reasons.reasons == ""

    def test_is_not_downloadable_no_file(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/wrong_path.parquet")
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
        reasons = dn.is_downloadable()
        assert not reasons
        assert len(reasons._reasons) == 1
        assert str(NoFileToDownload(_normalize_path(path), dn.id)) in reasons.reasons

    def test_is_not_downloadable_not_a_file(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample")
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
        reasons = dn.is_downloadable()
        assert not reasons
        assert len(reasons._reasons) == 1
        assert str(NotAFile(_normalize_path(path), dn.id)) in reasons.reasons

    def test_get_downloadable_path(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
        assert re.split(r"[\\/]", dn._get_downloadable_path()) == re.split(r"[\\/]", path)

    def test_get_downloadable_path_with_not_existing_file(self):
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": "NOT_EXISTING.parquet"})
        assert dn._get_downloadable_path() == ""

    def test_get_downloadable_path_as_directory_should_return_nothing(self):
        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/parquet_example")
        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path})
        assert dn._get_downloadable_path() == ""
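
    # _upload replaces the data node content with the uploaded file while keeping the original path;
    # freezegun pins the clock so the new last_edit_date is strictly greater than the previous one.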
    def test_upload(self, parquet_file_path, tmpdir_factory):
        old_parquet_path = tmpdir_factory.mktemp("data").join("df.parquet").strpath
        old_data = pd.DataFrame([{"a": 0, "b": 1, "c": 2}, {"a": 3, "b": 4, "c": 5}])

        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": old_parquet_path, "exposed_type": "pandas"})
        dn.write(old_data)
        old_last_edit_date = dn.last_edit_date

        upload_content = pd.read_parquet(parquet_file_path)

        with freezegun.freeze_time(old_last_edit_date + timedelta(seconds=1)):
            dn._upload(parquet_file_path)

        assert_frame_equal(dn.read(), upload_content)  # The content of the dn should change to the uploaded content
        assert dn.last_edit_date > old_last_edit_date
        assert dn.path == _normalize_path(old_parquet_path)  # The path of the dn should not change
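
    # The upload_checker callback receives the upload path and the data read from it; when it returns False,
    # or the file cannot be read as Parquet, the upload is rejected and the data node is left untouched.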
    def test_upload_with_upload_check_pandas(self, parquet_file_path, tmpdir_factory):
        old_parquet_path = tmpdir_factory.mktemp("data").join("df.parquet").strpath
        old_data = pd.DataFrame([{"a": 0, "b": 1, "c": 2}, {"a": 3, "b": 4, "c": 5}])

        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": old_parquet_path, "exposed_type": "pandas"})
        dn.write(old_data)
        old_last_edit_date = dn.last_edit_date

        def check_data_column(upload_path, upload_data):
            return upload_path.endswith(".parquet") and upload_data.columns.tolist() == ["a", "b", "c"]

        # The upload should fail when the uploaded file does not exist
        not_exists_parquet_path = tmpdir_factory.mktemp("data").join("not_exists.parquet").strpath
        reasons = dn._upload(not_exists_parquet_path, upload_checker=check_data_column)
        assert bool(reasons) is False
        assert (
            str(list(reasons._reasons[dn.id])[0]) == "The uploaded file not_exists.parquet can not be read,"
            f' therefore is not a valid data file for data node "{dn.id}"'
        )

        not_parquet_path = tmpdir_factory.mktemp("data").join("wrong_format_df.not_parquet").strpath
        old_data.to_parquet(not_parquet_path, index=False)
        # The upload should fail when the file is not a Parquet file
        reasons = dn._upload(not_parquet_path, upload_checker=check_data_column)
        assert bool(reasons) is False
        assert (
            str(list(reasons._reasons[dn.id])[0])
            == f'The uploaded file wrong_format_df.not_parquet has invalid data for data node "{dn.id}"'
        )

        wrong_format_parquet_path = tmpdir_factory.mktemp("data").join("wrong_format_df.parquet").strpath
        pd.DataFrame([{"a": 1, "b": 2, "d": 3}, {"a": 4, "b": 5, "d": 6}]).to_parquet(
            wrong_format_parquet_path, index=False
        )
        # The upload should fail when check_data_column() returns False
        reasons = dn._upload(wrong_format_parquet_path, upload_checker=check_data_column)
        assert bool(reasons) is False
        assert (
            str(list(reasons._reasons[dn.id])[0])
            == f'The uploaded file wrong_format_df.parquet has invalid data for data node "{dn.id}"'
        )

        assert_frame_equal(dn.read(), old_data)  # The content of the dn should not change when upload fails
        assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
        assert dn.path == _normalize_path(old_parquet_path)  # The path of the dn should not change

        # The upload should succeed when check_data_column() returns True
        assert dn._upload(parquet_file_path, upload_checker=check_data_column)
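
    # Same upload checks as above, with a numpy exposed type and a positivity check on the uploaded data.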
    def test_upload_with_upload_check_numpy(self, tmpdir_factory):
        old_parquet_path = tmpdir_factory.mktemp("data").join("df.parquet").strpath
        old_data = np.array([[1, 2, 3], [4, 5, 6]])

        new_parquet_path = tmpdir_factory.mktemp("data").join("new_upload_data.parquet").strpath
        new_data = np.array([[1, 2, 3], [4, 5, 6]])
        pd.DataFrame(new_data, columns=["a", "b", "c"]).to_parquet(new_parquet_path, index=False)

        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": old_parquet_path, "exposed_type": "numpy"})
        dn.write(old_data)
        old_last_edit_date = dn.last_edit_date

        def check_data_is_positive(upload_path, upload_data):
            return upload_path.endswith(".parquet") and np.all(upload_data > 0)

        # The upload should fail when the uploaded file does not exist
        not_exists_parquet_path = tmpdir_factory.mktemp("data").join("not_exists.parquet").strpath
        reasons = dn._upload(not_exists_parquet_path, upload_checker=check_data_is_positive)
        assert bool(reasons) is False
        assert (
            str(list(reasons._reasons[dn.id])[0]) == "The uploaded file not_exists.parquet can not be read,"
            f' therefore is not a valid data file for data node "{dn.id}"'
        )

        not_parquet_path = tmpdir_factory.mktemp("data").join("wrong_format_df.not_parquet").strpath
        pd.DataFrame(old_data, columns=["a", "b", "c"]).to_parquet(not_parquet_path, index=False)
        # The upload should fail when the file is not a Parquet file
        reasons = dn._upload(not_parquet_path, upload_checker=check_data_is_positive)
        assert (
            str(list(reasons._reasons[dn.id])[0])
            == f'The uploaded file wrong_format_df.not_parquet has invalid data for data node "{dn.id}"'
        )

        wrong_format_parquet_path = tmpdir_factory.mktemp("data").join("wrong_format_df.parquet").strpath
        pd.DataFrame(np.array([[-1, 2, 3], [-4, -5, -6]]), columns=["a", "b", "c"]).to_parquet(
            wrong_format_parquet_path, index=False
        )
        # The upload should fail when check_data_is_positive() returns False
        reasons = dn._upload(wrong_format_parquet_path, upload_checker=check_data_is_positive)
        assert (
            str(list(reasons._reasons[dn.id])[0])
            == f'The uploaded file wrong_format_df.parquet has invalid data for data node "{dn.id}"'
        )

        assert np.array_equal(dn.read(), old_data)  # The content of the dn should not change when upload fails
        assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
        assert dn.path == _normalize_path(old_parquet_path)  # The path of the dn should not change

        # The upload should succeed when check_data_is_positive() returns True
        assert dn._upload(new_parquet_path, upload_checker=check_data_is_positive)