Explorar el Código

fix: clean all data entities should also clean all generated files

trgiangdo committed 1 year ago — parent commit 2482229e1e
2 files changed, 22 insertions and 19 deletions
  1. taipy/core/data/_data_manager.py (+9 −9)
  2. tests/core/data/test_data_manager.py (+13 −10)

+ 9 - 9
taipy/core/data/_data_manager.py

@@ -24,10 +24,10 @@ from ..exceptions.exceptions import InvalidDataNodeType
 from ..notification import Event, EventEntityType, EventOperation, Notifier, _make_event
 from ..scenario.scenario_id import ScenarioId
 from ..sequence.sequence_id import SequenceId
+from ._abstract_file import _FileDataNodeMixin
 from ._data_fs_repository import _DataFSRepository
 from .data_node import DataNode
 from .data_node_id import DataNodeId
-from .pickle import PickleDataNode
 
 
 class _DataManager(_Manager[DataNode], _VersionMixin):
@@ -109,21 +109,21 @@ class _DataManager(_Manager[DataNode], _VersionMixin):
         return cls._repository._load_all(filters)
 
     @classmethod
-    def _clean_pickle_file(cls, data_node: DataNode):
-        if not isinstance(data_node, PickleDataNode):
+    def _clean_generated_file(cls, data_node: DataNode):
+        if not isinstance(data_node, _FileDataNodeMixin):
             return
         if data_node.is_generated and os.path.exists(data_node.path):
             os.remove(data_node.path)
 
     @classmethod
-    def _clean_pickle_files(cls, data_nodes: Iterable[DataNode]):
+    def _clean_generated_files(cls, data_nodes: Iterable[DataNode]):
         for data_node in data_nodes:
-            cls._clean_pickle_file(data_node)
+            cls._clean_generated_file(data_node)
 
     @classmethod
     def _delete(cls, data_node_id: DataNodeId):
         if data_node := cls._get(data_node_id, None):
-            cls._clean_pickle_file(data_node)
+            cls._clean_generated_file(data_node)
         super()._delete(data_node_id)
 
     @classmethod
@@ -132,19 +132,19 @@ class _DataManager(_Manager[DataNode], _VersionMixin):
         for data_node_id in data_node_ids:
             if data_node := cls._get(data_node_id):
                 data_nodes.append(data_node)
-        cls._clean_pickle_files(data_nodes)
+        cls._clean_generated_files(data_nodes)
         super()._delete_many(data_node_ids)
 
     @classmethod
     def _delete_all(cls):
         data_nodes = cls._get_all()
-        cls._clean_pickle_files(data_nodes)
+        cls._clean_generated_files(data_nodes)
         super()._delete_all()
 
     @classmethod
     def _delete_by_version(cls, version_number: str):
         data_nodes = cls._get_all(version_number)
-        cls._clean_pickle_files(data_nodes)
+        cls._clean_generated_files(data_nodes)
         cls._repository._delete_by(attribute="version", value=version_number)
         Notifier.publish(
             Event(EventEntityType.DATA_NODE, EventOperation.DELETION, metadata={"delete_by_version": version_number})

+ 13 - 10
tests/core/data/test_data_manager.py

@@ -8,6 +8,7 @@
 # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
+
 import os
 import pathlib
 
@@ -77,7 +78,9 @@ class TestDataManager:
         assert _DataManager._get(csv_dn.id).job_ids == csv_dn.job_ids
         assert not _DataManager._get(csv_dn.id).is_ready_for_reading
         assert _DataManager._get(csv_dn.id).is_ready_for_reading == csv_dn.is_ready_for_reading
-        assert len(_DataManager._get(csv_dn.id).properties) == 4
+        assert (
+            len(_DataManager._get(csv_dn.id).properties) == 5
+        )  # path, encoding, has_header, exposed_type, is_generated
         assert _DataManager._get(csv_dn.id).properties.get("path") == "bar"
         assert _DataManager._get(csv_dn.id).properties.get("encoding") == "utf-8"
         assert _DataManager._get(csv_dn.id).properties.get("has_header") is True
@@ -103,7 +106,7 @@ class TestDataManager:
         assert _DataManager._get(csv_dn).job_ids == csv_dn.job_ids
         assert not _DataManager._get(csv_dn).is_ready_for_reading
         assert _DataManager._get(csv_dn).is_ready_for_reading == csv_dn.is_ready_for_reading
-        assert len(_DataManager._get(csv_dn).properties) == 4
+        assert len(_DataManager._get(csv_dn).properties) == 5  # path, encoding, has_header, exposed_type, is_generated
         assert _DataManager._get(csv_dn).properties.get("path") == "bar"
         assert _DataManager._get(csv_dn).properties.get("encoding") == "utf-8"
         assert _DataManager._get(csv_dn).properties.get("has_header") is True
@@ -118,7 +121,7 @@ class TestDataManager:
         dn = _DataManager._create_and_set(config, None, None)
 
         assert _DataManager._get(dn.id).last_edit_date is None
-        assert len(_DataManager._get(dn.id).properties) == 1
+        assert len(_DataManager._get(dn.id).properties) == 2  # is_generated and path
         assert _DataManager._get(dn.id).properties.get("is_generated")
         assert not _DataManager._get(dn.id).edit_in_progress
         assert _DataManager._get(dn.id)._editor_id is None
@@ -127,7 +130,7 @@ class TestDataManager:
         dn.lock_edit("foo")
 
         assert _DataManager._get(dn.id).last_edit_date is None
-        assert len(_DataManager._get(dn.id).properties) == 1
+        assert len(_DataManager._get(dn.id).properties) == 2  # is_generated and path
         assert _DataManager._get(dn.id).properties.get("is_generated")
         assert _DataManager._get(dn.id).edit_in_progress
         assert _DataManager._get(dn.id).editor_id == "foo"
@@ -136,7 +139,7 @@ class TestDataManager:
         dn.unlock_edit("foo")
 
         assert _DataManager._get(dn.id).last_edit_date is None
-        assert len(_DataManager._get(dn.id).properties) == 1
+        assert len(_DataManager._get(dn.id).properties) == 2  # is_generated and path
         assert _DataManager._get(dn.id).properties.get("is_generated")
         assert not _DataManager._get(dn.id).edit_in_progress
         assert _DataManager._get(dn.id).editor_id is None
@@ -226,7 +229,7 @@ class TestDataManager:
         assert _DataManager._get(pickle_dn.id).job_ids == pickle_dn.job_ids
         assert not _DataManager._get(pickle_dn.id).is_ready_for_reading
         assert _DataManager._get(pickle_dn.id).is_ready_for_reading == pickle_dn.is_ready_for_reading
-        assert len(_DataManager._get(pickle_dn.id).properties) == 1
+        assert len(_DataManager._get(pickle_dn.id).properties) == 2  # is_generated and path
         assert _DataManager._get(pickle_dn.id).properties == pickle_dn.properties
 
         assert _DataManager._get(pickle_dn) is not None
@@ -245,7 +248,7 @@ class TestDataManager:
         assert _DataManager._get(pickle_dn).job_ids == pickle_dn.job_ids
         assert not _DataManager._get(pickle_dn).is_ready_for_reading
         assert _DataManager._get(pickle_dn).is_ready_for_reading == pickle_dn.is_ready_for_reading
-        assert len(_DataManager._get(pickle_dn).properties) == 1
+        assert len(_DataManager._get(pickle_dn).properties) == 2  # is_generated and path
         assert _DataManager._get(pickle_dn).properties == pickle_dn.properties
 
     def test_create_raises_exception_with_wrong_type(self):
@@ -459,7 +462,7 @@ class TestDataManager:
 
         dm._delete_all()
 
-    def test_clean_generated_pickle_files(self, pickle_file_path):
+    def test_clean_generated_files(self, pickle_file_path):
         user_pickle_dn_config = Config.configure_data_node(
             id="d1", storage_type="pickle", path=pickle_file_path, default_data="d"
         )
@@ -474,10 +477,10 @@ class TestDataManager:
         generated_pickle_dn_1 = dns[generated_pickle_dn_1_config]
         generated_pickle_dn_2 = dns[generated_pickle_dn_2_config]
 
-        _DataManager._clean_pickle_file(user_pickle_dn.id)
+        _DataManager._clean_generated_file(user_pickle_dn.id)
         assert file_exists(user_pickle_dn.path)
 
-        _DataManager._clean_pickle_files([generated_pickle_dn_1, generated_pickle_dn_2])
+        _DataManager._clean_generated_files([generated_pickle_dn_1, generated_pickle_dn_2])
         assert not file_exists(generated_pickle_dn_1.path)
         assert not file_exists(generated_pickle_dn_2.path)