
Merge pull request #972 from Avaiga/feature/#922-remove-backup-mechanism

feature/#922 remove backup mechanism
Toan Quach 1 year ago
parent
commit
32cdcac8d6
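
For context, the mechanism deleted here was opt-in and driven entirely by the TAIPY_BACKUP_FILE_PATH environment variable. The following is a minimal sketch of how it was exercised before this change, adapted from the deleted tests/core/_backup/test_backup.py further down; the `.taipy_backups` file name comes from that test and is only illustrative.

```python
import os

# The removed backup feature only wrote anything when this environment
# variable pointed at a writable file (one path per line).
os.environ["TAIPY_BACKUP_FILE_PATH"] = ".taipy_backups"

from taipy.core import Core

core = Core()
core.run()   # appended Config.core.storage_folder to .taipy_backups
core.stop()

# Creating file-based data nodes (CSV, Excel, JSON, Parquet, pickle) appended
# their paths as well; deleting the data nodes removed the matching lines.
with open(".taipy_backups") as f:
    print(f.read())
```
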

+ 0 - 1
taipy/core/README.md

@@ -53,7 +53,6 @@ Want to be part of the _Taipy Core_ community? Check out our
 
 - `taipy/`:
   - `core/`:
-    - `_backup/`: Internal package for Taipy data backup mechanism.
     - `_entity/`: Internal package for abstract entity definition and entity's properties management.
     - `_manager/`: Internal package for entity manager.
     - `_orchestrator/`: Internal package for task orchestrating and execution.

+ 0 - 10
taipy/core/_backup/__init__.py

@@ -1,10 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.

+ 0 - 60
taipy/core/_backup/_backup.py

@@ -1,60 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-
-import os
-
-from taipy.config import Config
-
-__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME = "TAIPY_BACKUP_FILE_PATH"
-
-
-def _init_backup_file_with_storage_folder():
-    if preserve_file_path := os.getenv(__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME):
-        with open(preserve_file_path, "a") as f:
-            f.write(f"{Config.core.storage_folder}\n")
-
-
-def _append_to_backup_file(new_file_path: str):
-    if preserve_file_path := os.getenv(__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME):
-        storage_folder = os.path.abspath(Config.core.storage_folder) + os.sep
-        if not os.path.abspath(new_file_path).startswith(storage_folder):
-            with open(preserve_file_path, "a") as f:
-                f.write(f"{new_file_path}\n")
-
-
-def _remove_from_backup_file(to_remove_file_path: str):
-    if preserve_file_path := os.getenv(__BACKUP_FILE_PATH_ENVIRONMENT_VARIABLE_NAME, None):
-        storage_folder = os.path.abspath(Config.core.storage_folder) + os.sep
-        if not os.path.abspath(to_remove_file_path).startswith(storage_folder):
-            try:
-                with open(preserve_file_path, "r+") as f:
-                    old_backup = f.read()
-                    to_remove_file_path = to_remove_file_path + "\n"
-
-                    # To avoid removing the file path of different data nodes that are pointing
-                    # to the same file. We will only replace the file path only once.
-                    if old_backup.startswith(to_remove_file_path):
-                        new_backup = old_backup.replace(to_remove_file_path, "", 1)
-                    else:
-                        new_backup = old_backup.replace("\n" + to_remove_file_path, "\n", 1)
-
-                    if new_backup is not old_backup:
-                        f.seek(0)
-                        f.write(new_backup)
-                        f.truncate()
-            except Exception:
-                pass
-
-
-def _replace_in_backup_file(old_file_path: str, new_file_path: str):
-    _remove_from_backup_file(old_file_path)
-    _append_to_backup_file(new_file_path)

+ 0 - 2
taipy/core/_core.py

@@ -15,7 +15,6 @@ from typing import Optional
 from taipy.config import Config
 from taipy.logger._taipy_logger import _TaipyLogger
 
-from ._backup._backup import _init_backup_file_with_storage_folder
 from ._core_cli import _CoreCLI
 from ._orchestrator._dispatcher._job_dispatcher import _JobDispatcher
 from ._orchestrator._orchestrator import _Orchestrator
@@ -120,7 +119,6 @@ class Core:
         Config.check()
         cls.__logger.info("Blocking configuration update...")
         Config.block_update()
-        _init_backup_file_with_storage_folder()
 
     def __start_dispatcher(self, force_restart):
         self.__logger.info("Starting job dispatcher...")

+ 0 - 18
taipy/core/data/_data_manager.py

@@ -16,7 +16,6 @@ from taipy.config._config import _Config
 from taipy.config.common.scope import Scope
 from taipy.config.config import Config
 
-from .._backup._backup import _append_to_backup_file, _remove_from_backup_file
 from .._manager._manager import _Manager
 from .._version._version_mixin import _VersionMixin
 from ..config.data_node_config import DataNodeConfig
@@ -25,7 +24,6 @@ from ..exceptions.exceptions import InvalidDataNodeType
 from ..notification import Event, EventEntityType, EventOperation, Notifier, _make_event
 from ..scenario.scenario_id import ScenarioId
 from ..sequence.sequence_id import SequenceId
-from ._abstract_file import _AbstractFileDataNode
 from ._data_fs_repository import _DataFSRepository
 from .data_node import DataNode
 from .data_node_id import DataNodeId
@@ -74,8 +72,6 @@ class _DataManager(_Manager[DataNode], _VersionMixin):
     ) -> DataNode:
         data_node = cls.__create(data_node_config, owner_id, parent_ids)
         cls._set(data_node)
-        if isinstance(data_node, _AbstractFileDataNode):
-            _append_to_backup_file(new_file_path=data_node._path)
         Notifier.publish(_make_event(data_node, EventOperation.CREATION))
         return data_node
 
@@ -124,21 +120,10 @@ class _DataManager(_Manager[DataNode], _VersionMixin):
         for data_node in data_nodes:
             cls._clean_pickle_file(data_node)
 
-    @classmethod
-    def _remove_dn_file_path_in_backup_file(cls, data_node: DataNode):
-        if isinstance(data_node, _AbstractFileDataNode):
-            _remove_from_backup_file(to_remove_file_path=data_node.path)
-
-    @classmethod
-    def _remove_dn_file_paths_in_backup_file(cls, data_nodes: Iterable[DataNode]):
-        for data_node in data_nodes:
-            cls._remove_dn_file_path_in_backup_file(data_node)
-
     @classmethod
     def _delete(cls, data_node_id: DataNodeId):
         if data_node := cls._get(data_node_id, None):
             cls._clean_pickle_file(data_node)
-            cls._remove_dn_file_path_in_backup_file(data_node)
         super()._delete(data_node_id)
 
     @classmethod
@@ -148,21 +133,18 @@ class _DataManager(_Manager[DataNode], _VersionMixin):
             if data_node := cls._get(data_node_id):
                 data_nodes.append(data_node)
         cls._clean_pickle_files(data_nodes)
-        cls._remove_dn_file_paths_in_backup_file(data_nodes)
         super()._delete_many(data_node_ids)
 
     @classmethod
     def _delete_all(cls):
         data_nodes = cls._get_all()
         cls._clean_pickle_files(data_nodes)
-        cls._remove_dn_file_paths_in_backup_file(data_nodes)
         super()._delete_all()
 
     @classmethod
     def _delete_by_version(cls, version_number: str):
         data_nodes = cls._get_all(version_number)
         cls._clean_pickle_files(data_nodes)
-        cls._remove_dn_file_paths_in_backup_file(data_nodes)
         cls._repository._delete_by(attribute="version", value=version_number)
         Notifier.publish(
             Event(EventEntityType.DATA_NODE, EventOperation.DELETION, metadata={"delete_by_version": version_number})

+ 0 - 3
taipy/core/data/csv.py

@@ -20,7 +20,6 @@ import pandas as pd
 
 from taipy.config.common.scope import Scope
 
-from .._backup._backup import _replace_in_backup_file
 from .._entity._reload import _self_reload
 from .._version._version_manager_factory import _VersionManagerFactory
 from ..job.job_id import JobId
@@ -168,10 +167,8 @@ class CSVDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
 
     @path.setter
     def path(self, value):
-        tmp_old_path = self._path
         self._path = value
         self.properties[self.__PATH_KEY] = value
-        _replace_in_backup_file(old_file_path=tmp_old_path, new_file_path=self._path)
 
     def _read(self):
         if self.properties[self._EXPOSED_TYPE_PROPERTY] == self._EXPOSED_TYPE_PANDAS:

+ 0 - 3
taipy/core/data/excel.py

@@ -20,7 +20,6 @@ from openpyxl import load_workbook
 
 from taipy.config.common.scope import Scope
 
-from .._backup._backup import _replace_in_backup_file
 from .._entity._reload import _self_reload
 from .._version._version_manager_factory import _VersionManagerFactory
 from ..exceptions.exceptions import ExposedTypeLengthMismatch, NonExistingExcelSheet, SheetNameLengthMismatch
@@ -167,10 +166,8 @@ class ExcelDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
 
     @path.setter
     def path(self, value):
-        tmp_old_path = self._path
         self._path = value
         self.properties[self.__PATH_KEY] = value
-        _replace_in_backup_file(old_file_path=tmp_old_path, new_file_path=self._path)
 
     @classmethod
     def storage_type(cls) -> str:

+ 0 - 3
taipy/core/data/json.py

@@ -20,7 +20,6 @@ from typing import Any, Dict, List, Optional, Set
 
 from taipy.config.common.scope import Scope
 
-from .._backup._backup import _replace_in_backup_file
 from .._entity._reload import _self_reload
 from .._version._version_manager_factory import _VersionManagerFactory
 from ._abstract_file import _AbstractFileDataNode
@@ -159,10 +158,8 @@ class JSONDataNode(DataNode, _AbstractFileDataNode):
 
     @path.setter
     def path(self, value):
-        tmp_old_path = self._path
         self._path = value
         self.properties[self.__PATH_KEY] = value
-        _replace_in_backup_file(old_file_path=tmp_old_path, new_file_path=self._path)
 
     @property  # type: ignore
     @_self_reload(DataNode._MANAGER_NAME)

+ 0 - 3
taipy/core/data/parquet.py

@@ -19,7 +19,6 @@ import pandas as pd
 
 from taipy.config.common.scope import Scope
 
-from .._backup._backup import _replace_in_backup_file
 from .._entity._reload import _self_reload
 from .._version._version_manager_factory import _VersionManagerFactory
 from ..exceptions.exceptions import UnknownCompressionAlgorithm, UnknownParquetEngine
@@ -209,10 +208,8 @@ class ParquetDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode)
 
     @path.setter
     def path(self, value):
-        tmp_old_path = self._path
         self._path = value
         self.properties[self.__PATH_KEY] = value
-        _replace_in_backup_file(old_file_path=tmp_old_path, new_file_path=self._path)
 
     def _read(self):
         return self.read_with_kwargs()

+ 0 - 3
taipy/core/data/pickle.py

@@ -16,7 +16,6 @@ from typing import Any, List, Optional, Set
 
 from taipy.config.common.scope import Scope
 
-from .._backup._backup import _replace_in_backup_file
 from .._entity._reload import _self_reload
 from .._version._version_manager_factory import _VersionManagerFactory
 from ._abstract_file import _AbstractFileDataNode
@@ -143,11 +142,9 @@ class PickleDataNode(DataNode, _AbstractFileDataNode):
 
     @path.setter
     def path(self, value):
-        tmp_old_path = self._path
         self._path = value
         self.properties[self.__PATH_KEY] = value
         self.properties[self.__IS_GENERATED_KEY] = False
-        _replace_in_backup_file(old_file_path=tmp_old_path, new_file_path=self._path)
 
     @property  # type: ignore
     @_self_reload(DataNode._MANAGER_NAME)

+ 0 - 10
tests/core/_backup/__init__.py

@@ -1,10 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.

+ 0 - 308
tests/core/_backup/test_backup.py

@@ -1,308 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import os
-
-import pytest
-
-from taipy.config.config import Config
-from taipy.core import Core
-from taipy.core.data._data_manager import _DataManager
-from taipy.core.data.csv import CSVDataNode
-from taipy.core.data.excel import ExcelDataNode
-from taipy.core.data.json import JSONDataNode
-from taipy.core.data.parquet import ParquetDataNode
-from taipy.core.data.pickle import PickleDataNode
-
-
-def read_backup_file(path):
-    with open(path, "r") as f:
-        lines = f.readlines()
-    return lines
-
-
-@pytest.fixture(scope="function", autouse=True)
-def init_backup_file():
-    os.environ["TAIPY_BACKUP_FILE_PATH"] = ".taipy_backups"
-    if os.path.exists(os.environ["TAIPY_BACKUP_FILE_PATH"]):
-        os.remove(os.environ["TAIPY_BACKUP_FILE_PATH"])
-
-    yield
-
-    if os.path.exists(".taipy_backups"):
-        os.remove(".taipy_backups")
-    del os.environ["TAIPY_BACKUP_FILE_PATH"]
-
-
-backup_file_path = ".taipy_backups"
-
-
-def test_backup_storage_folder_when_core_run():
-    core = Core()
-    core.run()
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{Config.core.storage_folder}\n"]
-    core.stop()
-
-
-def test_no_new_entry_when_file_is_in_storage_folder():
-    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", path="dn_1.pickle")
-    dn_cfg_2 = Config.configure_data_node("dn_cfg_2")  # stored in .data folder
-
-    dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
-
-    dn_1.write("DN1_CONTENT")
-    dn_2.write("DN2_CONTENT")
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{dn_1.path}\n"]
-    os.remove(dn_1.path)
-
-
-def test_backup_csv_files():
-    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "csv", path="example_1.csv")
-    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "csv", path="example_2.csv")
-
-    csv_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    assert isinstance(csv_dn_1, CSVDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{csv_dn_1.path}\n"]
-
-    csv_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
-    assert isinstance(csv_dn_2, CSVDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{csv_dn_1.path}\n", f"{csv_dn_2.path}\n"]
-
-    csv_dn_1.path = "example_3.csv"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{csv_dn_2.path}\n", f"{csv_dn_1.path}\n"]
-
-    csv_dn_2.path = "example_4.csv"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{csv_dn_1.path}\n", f"{csv_dn_2.path}\n"]
-
-    _DataManager._delete(csv_dn_1.id)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{csv_dn_2.path}\n"]
-
-    csv_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    csv_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    assert isinstance(csv_dn_3, CSVDataNode)
-    assert isinstance(csv_dn_4, CSVDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{csv_dn_2.path}\n", f"{csv_dn_3.path}\n", f"{csv_dn_4.path}\n"]
-
-    csv_dn_4.path = "example_5.csv"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{csv_dn_2.path}\n", f"{csv_dn_3.path}\n", f"{csv_dn_4.path}\n"]
-
-    _DataManager._delete_all()
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == []
-
-
-def test_backup_excel_files():
-    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "excel", path="example_1.xlsx")
-    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "excel", path="example_2.xlsx")
-
-    excel_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    assert isinstance(excel_dn_1, ExcelDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{excel_dn_1.path}\n"]
-
-    excel_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
-    assert isinstance(excel_dn_2, ExcelDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{excel_dn_1.path}\n", f"{excel_dn_2.path}\n"]
-
-    excel_dn_1.path = "example_3.excel"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{excel_dn_2.path}\n", f"{excel_dn_1.path}\n"]
-
-    excel_dn_2.path = "example_4.excel"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{excel_dn_1.path}\n", f"{excel_dn_2.path}\n"]
-
-    _DataManager._delete(excel_dn_1.id)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{excel_dn_2.path}\n"]
-
-    excel_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    excel_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    assert isinstance(excel_dn_3, ExcelDataNode)
-    assert isinstance(excel_dn_4, ExcelDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{excel_dn_2.path}\n", f"{excel_dn_3.path}\n", f"{excel_dn_4.path}\n"]
-
-    excel_dn_4.path = "example_5.excel"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{excel_dn_2.path}\n", f"{excel_dn_3.path}\n", f"{excel_dn_4.path}\n"]
-
-    _DataManager._delete_all()
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == []
-
-
-def test_backup_pickle_files():
-    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "pickle", path="example_1.p")
-    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "pickle", path="example_2.p")
-
-    pickle_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    assert isinstance(pickle_dn_1, PickleDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{pickle_dn_1.path}\n"]
-
-    pickle_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
-    assert isinstance(pickle_dn_2, PickleDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{pickle_dn_1.path}\n", f"{pickle_dn_2.path}\n"]
-
-    pickle_dn_1.path = "example_3.pickle"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{pickle_dn_2.path}\n", f"{pickle_dn_1.path}\n"]
-
-    pickle_dn_2.path = "example_4.pickle"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{pickle_dn_1.path}\n", f"{pickle_dn_2.path}\n"]
-
-    _DataManager._delete(pickle_dn_1.id)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{pickle_dn_2.path}\n"]
-
-    pickle_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    pickle_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    assert isinstance(pickle_dn_3, PickleDataNode)
-    assert isinstance(pickle_dn_4, PickleDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{pickle_dn_2.path}\n", f"{pickle_dn_3.path}\n", f"{pickle_dn_4.path}\n"]
-
-    pickle_dn_4.path = "example_5.pickle"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{pickle_dn_2.path}\n", f"{pickle_dn_3.path}\n", f"{pickle_dn_4.path}\n"]
-
-    _DataManager._delete_all()
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == []
-
-
-def test_backup_json_files():
-    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "json", path="example_1.json")
-    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "json", path="example_2.json")
-
-    json_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    assert isinstance(json_dn_1, JSONDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{json_dn_1.path}\n"]
-
-    json_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
-    assert isinstance(json_dn_2, JSONDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{json_dn_1.path}\n", f"{json_dn_2.path}\n"]
-
-    json_dn_1.path = "example_3.json"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{json_dn_2.path}\n", f"{json_dn_1.path}\n"]
-
-    json_dn_2.path = "example_4.json"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{json_dn_1.path}\n", f"{json_dn_2.path}\n"]
-
-    _DataManager._delete(json_dn_1.id)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{json_dn_2.path}\n"]
-
-    json_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    json_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    assert isinstance(json_dn_3, JSONDataNode)
-    assert isinstance(json_dn_4, JSONDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{json_dn_2.path}\n", f"{json_dn_3.path}\n", f"{json_dn_4.path}\n"]
-
-    json_dn_4.path = "example_5.json"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{json_dn_2.path}\n", f"{json_dn_3.path}\n", f"{json_dn_4.path}\n"]
-
-    _DataManager._delete_all()
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == []
-
-
-def test_backup_parquet_files():
-    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "parquet", path="example_1.parquet")
-    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "parquet", path="example_2.parquet")
-
-    parquet_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    assert isinstance(parquet_dn_1, ParquetDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{parquet_dn_1.path}\n"]
-
-    parquet_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
-    assert isinstance(parquet_dn_2, ParquetDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{parquet_dn_1.path}\n", f"{parquet_dn_2.path}\n"]
-
-    parquet_dn_1.path = "example_3.parquet"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{parquet_dn_2.path}\n", f"{parquet_dn_1.path}\n"]
-
-    parquet_dn_2.path = "example_4.parquet"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{parquet_dn_1.path}\n", f"{parquet_dn_2.path}\n"]
-
-    _DataManager._delete(parquet_dn_1.id)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{parquet_dn_2.path}\n"]
-
-    parquet_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    parquet_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
-    assert isinstance(parquet_dn_3, ParquetDataNode)
-    assert isinstance(parquet_dn_4, ParquetDataNode)
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{parquet_dn_2.path}\n", f"{parquet_dn_3.path}\n", f"{parquet_dn_4.path}\n"]
-
-    parquet_dn_4.path = "example_5.parquet"
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == [f"{parquet_dn_2.path}\n", f"{parquet_dn_3.path}\n", f"{parquet_dn_4.path}\n"]
-
-    _DataManager._delete_all()
-
-    backup_files = read_backup_file(backup_file_path)
-    assert backup_files == []
-
-
-def test_no_backup_if_no_env_var():
-    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "csv", path="example_1.csv")
-    _DataManager._create_and_set(dn_cfg_1, None, None)