Explorar o código

refactor: route DataNode.read() and write() to _DataNodeManager

trgiangdo hai 1 mes
pai
achega
1fb1f443e8

+ 1 - 1
taipy/core/_orchestrator/_dispatcher/_task_function_wrapper.py

@@ -54,7 +54,7 @@ class _TaskFunctionWrapper:
 
     def _read_inputs(self, inputs: List[DataNode]) -> List[Any]:
         data_manager = _DataManagerFactory._build_manager()
-        return [data_manager._get(dn.id).read_or_raise() for dn in inputs]
+        return [data_manager._read(data_manager._get(dn.id)) for dn in inputs]
 
     def _write_data(self, outputs: List[DataNode], results, job_id: JobId):
         data_manager = _DataManagerFactory._build_manager()

+ 42 - 2
taipy/core/data/_data_manager.py

@@ -10,10 +10,11 @@
 # specific language governing permissions and limitations under the License.
 
 import os
-from typing import Dict, Iterable, List, Optional, Set, Union
+from typing import Any, Dict, Iterable, List, Optional, Set, Union
 
 from taipy.common.config import Config
 from taipy.common.config._config import _Config
+from taipy.core.job.job_id import JobId
 
 from .._manager._manager import _Manager
 from .._repository._abstract_repository import _AbstractRepository
@@ -21,7 +22,7 @@ from .._version._version_mixin import _VersionMixin
 from ..common.scope import Scope
 from ..config.data_node_config import DataNodeConfig
 from ..cycle.cycle_id import CycleId
-from ..exceptions.exceptions import InvalidDataNodeType
+from ..exceptions.exceptions import InvalidDataNodeType, NoData
 from ..notification import Event, EventEntityType, EventOperation, Notifier, _make_event
 from ..reason import EntityDoesNotExist, NotGlobalScope, ReasonCollection, WrongConfigType
 from ..reason.reason import DataIsNotDuplicable
@@ -125,6 +126,45 @@ class _DataManager(_Manager[DataNode], _VersionMixin):
         filters = cls._build_filters_with_version(version_number)
         return cls._repository._load_all(filters)
 
+    @classmethod
+    def _read(cls, data_node: DataNode) -> Any:
+        """Read the data referenced by this data node.
+
+        Returns:
+            The data referenced by this data node.
+
+        Raises:
+            NoData^: If the data has not been written yet.
+        """
+        if not data_node.last_edit_date:
+            raise NoData(f"Data node {data_node.id} from config {data_node.config_id} has not been written yet.")
+
+        return data_node._read()
+
+    @classmethod
+    def _append(
+        cls, data_node: DataNode, data, editor_id: Optional[str] = None, comment: Optional[str] = None, **kwargs: Any
+    ):
+        data_node._append(data)
+        data_node.track_edit(editor_id=editor_id, comment=comment, **kwargs)
+        data_node.unlock_edit()
+        cls._update(data_node)
+
+    @classmethod
+    def _write(
+        cls,
+        data_node: DataNode,
+        data,
+        job_id: Optional[JobId] = None,
+        editor_id: Optional[str] = None,
+        comment: Optional[str] = None,
+        **kwargs: Any,
+    ):
+        data_node._write(data)
+        data_node.track_edit(job_id=job_id, editor_id=editor_id, comment=comment, **kwargs)
+        data_node.unlock_edit()
+        cls._update(data_node)
+
     @classmethod
     def _clean_generated_file(cls, data_node: DataNode) -> None:
         if not isinstance(data_node, _FileDataNodeMixin):

+ 9 - 24
taipy/core/data/data_node.py

@@ -395,27 +395,16 @@ class DataNode(_Entity, _Labeled):
         """
         raise NotImplementedError
 
-    def read_or_raise(self) -> Any:
-        """Read the data referenced by this data node.
-
-        Returns:
-            The data referenced by this data node.
-
-        Raises:
-            NoData^: If the data has not been written yet.
-        """
-        if not self.last_edit_date:
-            raise NoData(f"Data node {self.id} from config {self.config_id} has not been written yet.")
-        return self._read()
-
     def read(self) -> Any:
         """Read the data referenced by this data node.
 
         Returns:
             The data referenced by this data node. None if the data has not been written yet.
         """
+        from ._data_manager_factory import _DataManagerFactory
+
         try:
-            return self.read_or_raise()
+            return _DataManagerFactory._build_manager()._read(self)
         except NoData:
             self._logger.warning(
                 f"Data node {self.id} from config {self.config_id} is being read but has never been written."
@@ -432,8 +421,6 @@ class DataNode(_Entity, _Labeled):
             **kwargs (Any): Extra information to attach to the edit document
                 corresponding to this write.
         """
-        from ._data_manager_factory import _DataManagerFactory
-
         if (
             editor_id
             and self.edit_in_progress
@@ -441,10 +428,10 @@
             and (not self.editor_expiration_date or self.editor_expiration_date > datetime.now())
         ):
             raise DataNodeIsBeingEdited(self.id, self.editor_id)
-        self._append(data)
-        self.track_edit(editor_id=editor_id, comment=comment, **kwargs)
-        self.unlock_edit()
-        _DataManagerFactory._build_manager()._update(self)
+
+        from ._data_manager_factory import _DataManagerFactory
+
+        _DataManagerFactory._build_manager()._append(self, data, editor_id, comment, **kwargs)
 
     def write(
         self,
@@ -473,12 +460,10 @@
             and (not self.editor_expiration_date or self.editor_expiration_date > datetime.now())
         ):
             raise DataNodeIsBeingEdited(self.id, self.editor_id)
-        self._write(data)
-        self.track_edit(job_id=job_id, editor_id=editor_id, comment=comment, **kwargs)
-        self.unlock_edit()
+
        from ._data_manager_factory import _DataManagerFactory
 
-        _DataManagerFactory._build_manager()._update(self)
+        _DataManagerFactory._build_manager()._write(self, data, job_id, editor_id, comment, **kwargs)
 
     def track_edit(
         self,

+ 1 - 4
taipy/core/exceptions/exceptions.py

@@ -65,10 +65,7 @@ class MultipleDataNodeFromSameConfigWithSameOwner(Exception):
 
 
 class NoData(Exception):
-    """Raised if a data node is read before it has been written.
-
-    This exception can be raised by `DataNode.read_or_raise()^`.
-    """
+    """Raised if a data node is read before it has been written."""
 
 
 class UnknownDatabaseEngine(Exception):

+ 2 - 2
tests/core/data/test_data_node.py

@@ -150,9 +150,9 @@ class TestDataNode:
     def test_read_write(self):
         dn = FakeDataNode("foo_bar")
         _DataManagerFactory._build_manager()._repository._save(dn)
+        assert dn.read() is None
         with pytest.raises(NoData):
-            assert dn.read() is None
-            dn.read_or_raise()
+            _DataManagerFactory._build_manager()._read(dn)
         assert dn.write_has_been_called == 0
         assert dn.read_has_been_called == 0
         assert not dn.is_ready_for_reading

+ 2 - 2
tests/core/data/test_in_memory_data_node.py

@@ -52,9 +52,9 @@ class TestInMemoryDataNodeEntity:
    def test_read_and_write(self):
        no_data_dn = InMemoryDataNode("foo", Scope.SCENARIO, DataNodeId("dn_id"))
        _DataManagerFactory._build_manager()._repository._save(no_data_dn)
+        assert no_data_dn.read() is None
        with pytest.raises(NoData):
-            assert no_data_dn.read() is None
-            no_data_dn.read_or_raise()
+            _DataManagerFactory._build_manager()._read(no_data_dn)
        in_mem_dn = InMemoryDataNode("foo", Scope.SCENARIO, properties={"default_data": "bar"})
        _DataManagerFactory._build_manager()._repository._save(in_mem_dn)
        assert isinstance(in_mem_dn.read(), str)

+ 2 - 2
tests/core/data/test_json_data_node.py

@@ -162,9 +162,9 @@ class TestJSONDataNode:
 
     def test_read_non_existing_json(self):
         not_existing_json = JSONDataNode("foo", Scope.SCENARIO, properties={"default_path": "WRONG.json"})
+        assert not_existing_json.read() is None
         with pytest.raises(NoData):
-            assert not_existing_json.read() is None
-            not_existing_json.read_or_raise()
+            _DataManagerFactory._build_manager()._read(not_existing_json)
 
     def test_read(self):
         path_1 = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/json/example_list.json")

+ 2 - 2
tests/core/data/test_pickle_data_node.py

@@ -117,9 +117,9 @@ class TestPickleDataNodeEntity:
     def test_read_and_write(self):
         no_data_dn = PickleDataNode("foo", Scope.SCENARIO)
         _DataManagerFactory._build_manager()._repository._save(no_data_dn)
+        assert no_data_dn.read() is None
         with pytest.raises(NoData):
-            assert no_data_dn.read() is None
-            no_data_dn.read_or_raise()
+            _DataManagerFactory._build_manager()._read(no_data_dn)
         pickle_str = PickleDataNode("foo", Scope.SCENARIO, properties={"default_data": "bar"})
         _DataManagerFactory._build_manager()._repository._save(pickle_str)
         assert isinstance(pickle_str.read(), str)

+ 5 - 4
tests/core/data/test_read_csv_data_node.py

@@ -18,6 +18,7 @@ import pandas as pd
 import pytest
 
 from taipy import Scope
+from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data.csv import CSVDataNode
 from taipy.core.exceptions.exceptions import NoData
 
@@ -33,9 +34,9 @@ class MyCustomObject:
 
 def test_raise_no_data_with_header():
     not_existing_csv = CSVDataNode("foo", Scope.SCENARIO, properties={"path": "WRONG.csv", "has_header": True})
+    assert not_existing_csv.read() is None
    with pytest.raises(NoData):
-        assert not_existing_csv.read() is None
-        not_existing_csv.read_or_raise()
+        _DataManagerFactory._build_manager()._read(not_existing_csv)
 
 
 def test_read_with_header_pandas():
@@ -75,9 +76,9 @@ def test_read_with_header_custom_exposed_type():
 
 def test_raise_no_data_without_header():
     not_existing_csv = CSVDataNode("foo", Scope.SCENARIO, properties={"path": "WRONG.csv", "has_header": False})
+    assert not_existing_csv.read() is None
     with pytest.raises(NoData):
-        assert not_existing_csv.read() is None
-        not_existing_csv.read_or_raise()
+        _DataManagerFactory._build_manager()._read(not_existing_csv)
 
 
 def test_read_without_header_pandas():

+ 11 - 12
tests/core/data/test_read_excel_data_node.py

@@ -18,6 +18,7 @@ import pandas as pd
 import pytest
 
 from taipy import Scope
+from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data.excel import ExcelDataNode
 from taipy.core.exceptions.exceptions import (
     ExposedTypeLengthMismatch,
@@ -62,10 +63,10 @@ custom_pandas_numpy_exposed_type_dict = {"Sheet1": "pandas", "Sheet2": "numpy"}
 
 
 def test_raise_no_data_with_header():
+    not_existing_excel = ExcelDataNode("foo", Scope.SCENARIO, properties={"path": "WRONG.xlsx"})
+    assert not_existing_excel.read() is None
     with pytest.raises(NoData):
-        not_existing_excel = ExcelDataNode("foo", Scope.SCENARIO, properties={"path": "WRONG.xlsx"})
-        assert not_existing_excel.read() is None
-        not_existing_excel.read_or_raise()
+        _DataManagerFactory._build_manager()._read(not_existing_excel)
 
 
 def test_read_empty_excel_with_header():
@@ -79,12 +80,10 @@ def test_read_empty_excel_with_header():
 
 
 def test_raise_no_data_without_header():
+    not_existing_excel = ExcelDataNode("foo", Scope.SCENARIO, properties={"path": "WRONG.xlsx", "has_header": False})
+    assert not_existing_excel.read() is None
     with pytest.raises(NoData):
-        not_existing_excel = ExcelDataNode(
-            "foo", Scope.SCENARIO, properties={"path": "WRONG.xlsx", "has_header": False}
-        )
-        assert not_existing_excel.read() is None
-        not_existing_excel.read_or_raise()
+        _DataManagerFactory._build_manager()._read(not_existing_excel)
 
 
 def test_read_empty_excel_without_header():
@@ -103,9 +102,9 @@ def test_read_multi_sheet_with_header_no_data():
         Scope.SCENARIO,
         properties={"path": "WRONG.xlsx", "sheet_name": ["sheet_name_1", "sheet_name_2"]},
     )
+    assert not_existing_excel.read() is None
     with pytest.raises(NoData):
-        assert not_existing_excel.read() is None
-        not_existing_excel.read_or_raise()
+        _DataManagerFactory._build_manager()._read(not_existing_excel)
 
 
 def test_read_multi_sheet_without_header_no_data():
@@ -114,9 +113,9 @@ def test_read_multi_sheet_without_header_no_data():
         Scope.SCENARIO,
         properties={"path": "WRONG.xlsx", "has_header": False, "sheet_name": ["sheet_name_1", "sheet_name_2"]},
     )
+    assert not_existing_excel.read() is None
     with pytest.raises(NoData):
-        assert not_existing_excel.read() is None
-        not_existing_excel.read_or_raise()
+        _DataManagerFactory._build_manager()._read(not_existing_excel)
 
 
 ########################## SINGLE SHEET ##########################

+ 2 - 2
tests/core/data/test_read_parquet_data_node.py

@@ -68,9 +68,9 @@ class TestReadParquetDataNode:
         not_existing_parquet = ParquetDataNode(
             "foo", Scope.SCENARIO, properties={"path": "nonexistent.parquet", "engine": engine}
         )
+        assert not_existing_parquet.read() is None
         with pytest.raises(NoData):
-            assert not_existing_parquet.read() is None
-            not_existing_parquet.read_or_raise()
+            _DataManagerFactory._build_manager()._read(not_existing_parquet)
 
     @pytest.mark.parametrize("engine", __engine)
     def test_read_parquet_file_pandas(self, engine, parquet_file_path):