feat: export and import zip archive instead of a folder

trgiangdo, 1 year ago
parent commit ffe57e4ee0
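
In short, export_scenario() now writes a single zip archive and import_scenario() reads one back. A minimal usage sketch based on the tests in this commit (the `scenario` object and the "backup/my_scenario" path are illustrative, not part of the change):

    import taipy as tp

    # Assumes `scenario` is an existing, submitted scenario in the current application.
    # A ".zip" suffix is appended to the output path if it is missing.
    tp.export_scenario(scenario.id, "backup/my_scenario")   # -> backup/my_scenario.zip

    # The archive can later be imported into a compatible Taipy application.
    imported = tp.import_scenario("backup/my_scenario.zip", override=True)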

+ 14 - 18
taipy/core/exceptions/exceptions.py

@@ -339,10 +339,6 @@ class ModeNotAvailable(Exception):
     """Raised if the mode in JobConfig is not supported."""
     """Raised if the mode in JobConfig is not supported."""
 
 
 
 
-class InvalidExportPath(Exception):
-    """Raised if the export path is not valid."""
-
-
 class NonExistingVersion(Exception):
     """Raised if request a Version that is not known by the Version Manager."""

@@ -373,12 +369,12 @@ class FileCannotBeRead(Exception):
     """Raised when a file cannot be read."""
     """Raised when a file cannot be read."""
 
 
 
 
-class ExportFolderAlreadyExists(Exception):
+class ExportPathAlreadyExists(Exception):
     """Raised when the export folder already exists."""
     """Raised when the export folder already exists."""
 
 
-    def __init__(self, folder_path: str, scenario_id: str):
+    def __init__(self, export_path: str, scenario_id: str):
         self.message = (
         self.message = (
-            f"Folder '{folder_path}' already exists and can not be used to export scenario '{scenario_id}'."
+            f"The path '{export_path}' already exists and can not be used to export scenario '{scenario_id}'."
             " Please use the 'override' parameter to override it."
             " Please use the 'override' parameter to override it."
         )
         )
 
 
@@ -386,32 +382,32 @@ class ExportFolderAlreadyExists(Exception):
 class EntitiesToBeImportAlredyExist(Exception):
     """Raised when entities in the scenario to be imported have already exists"""

-    def __init__(self, folder_path):
-        self.message = f"The import folder {folder_path} contains entities that have already existed."
+    def __init__(self, import_path):
+        self.message = f"The import archive file {import_path} contains entities that already exist."


 class DataToBeImportAlredyExist(Exception):
     """Raised when data files in the scenario to be imported have already exists"""

-    def __init__(self, folder_path):
+    def __init__(self, import_path):
         self.message = (
-            f"The import folder {folder_path} contains data files that have already existed."
+            f"The import archive file {import_path} contains data files that already exist."
             " Please use the 'override' parameter to override those."
         )


-class ImportFolderDoesntContainAnyScenario(Exception):
-    """Raised when the import folder doesn't contain any scenario"""
+class ImportArchiveDoesntContainAnyScenario(Exception):
+    """Raised when the import archive file doesn't contain any scenario"""
 
 
-    def __init__(self, folder_path):
-        self.message = f"The import folder {folder_path} doesn't contain any scenario."
+    def __init__(self, import_path):
+        self.message = f"The import archive file {import_path} doesn't contain any scenario."


 class ImportScenarioDoesntHaveAVersion(Exception):
-    """Raised when the import folder doesn't contain any scenario"""
+    """Raised when the import scenario doesn't have a version"""

-    def __init__(self, folder_path):
-        self.message = f"The import scenario in the folder {folder_path} doesn't have a version."
+    def __init__(self, import_path):
+        self.message = f"The import scenario in the import archive file {import_path} doesn't have a version."


 class SQLQueryCannotBeExecuted(Exception):
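
For reference, a small sketch of how callers might handle the renamed exception; the `scenario` object and the path are illustrative, and the import path follows the test files in this commit:

    import taipy as tp
    from taipy.core.exceptions import ExportPathAlreadyExists

    # Assumes `scenario` is an existing scenario; retry the export with
    # override=True when the target zip archive already exists.
    try:
        tp.export_scenario(scenario.id, "backup/my_scenario.zip")
    except ExportPathAlreadyExists:
        tp.export_scenario(scenario.id, "backup/my_scenario.zip", override=True)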

+ 139 - 127
taipy/core/taipy.py

@@ -12,6 +12,8 @@
 import os
 import pathlib
 import shutil
+import tempfile
+import zipfile
 from datetime import datetime
 from typing import Any, Callable, Dict, List, Optional, Set, Union, overload

@@ -42,10 +44,9 @@ from .data.data_node_id import DataNodeId
 from .exceptions.exceptions import (
     DataNodeConfigIsNotGlobal,
     EntitiesToBeImportAlredyExist,
-    ExportFolderAlreadyExists,
-    ImportFolderDoesntContainAnyScenario,
+    ExportPathAlreadyExists,
+    ImportArchiveDoesntContainAnyScenario,
     ImportScenarioDoesntHaveAVersion,
-    InvalidExportPath,
     ModelNotFound,
     NonExistingVersion,
     VersionIsNotProductionVersion,
@@ -947,18 +948,19 @@ def clean_all_entities(version_number: str) -> bool:

 def export_scenario(
     scenario_id: ScenarioId,
-    folder_path: Union[str, pathlib.Path],
+    output_path: Union[str, pathlib.Path],
     override: bool = False,
     include_data: bool = False,
 ):
-    """Export all related entities of a scenario to a folder.
+    """Export all related entities of a scenario to a archive zip file.
 
 
     This function exports all related entities of the specified scenario to the
     This function exports all related entities of the specified scenario to the
-    specified folder.
+    specified archive zip file.
 
 
     Parameters:
     Parameters:
         scenario_id (ScenarioId): The ID of the scenario to export.
         scenario_id (ScenarioId): The ID of the scenario to export.
-        folder_path (Union[str, pathlib.Path]): The folder path to export the scenario to.
+        output_path (Union[str, pathlib.Path]): The path to export the scenario to.
+            The path should include the file name without the extension or with the `.zip` extension.
             If the path exists and the override parameter is False, an exception is raised.
             If the path exists and the override parameter is False, an exception is raised.
         override (bool): If True, the existing folder will be overridden. Default is False.
         override (bool): If True, the existing folder will be overridden. Default is False.
         include_data (bool): If True, the file-based data nodes are exported as well.
         include_data (bool): If True, the file-based data nodes are exported as well.
@@ -967,7 +969,7 @@ def export_scenario(
             will not be exported. The default value is False.

     Raises:
-        ExportFolderAlreadyExist^: If the `folder_path` already exists and the override parameter is False.
+        ExportPathAlreadyExists^: If the `output_path` already exists and the override parameter is False.
     """
     manager = _ScenarioManagerFactory._build_manager()
     scenario = manager._get(scenario_id)
@@ -976,41 +978,45 @@ def export_scenario(
     if scenario.cycle:
         entity_ids.cycle_ids = {scenario.cycle.id}

-    if folder_path == Config.core.taipy_storage_folder:
-        raise InvalidExportPath("The export folder must not be the storage folder.")
+    output_filename = os.path.splitext(output_path)[0] if str(output_path).endswith(".zip") else str(output_path)
+    output_zip_path = pathlib.Path(output_filename + ".zip")

-    if os.path.exists(folder_path):
+    if output_zip_path.exists():
         if override:
-            __logger.warning(f"Override the existing folder '{folder_path}'")
-            shutil.rmtree(folder_path, ignore_errors=True)
+            __logger.warning(f"Override the existing path '{output_zip_path}' to export scenario {scenario_id}.")
+            output_zip_path.unlink()
         else:
-            raise ExportFolderAlreadyExists(str(folder_path), scenario_id)
-
-    for data_node_id in entity_ids.data_node_ids:
-        _DataManagerFactory._build_manager()._export(data_node_id, folder_path, include_data=include_data)
-    for task_id in entity_ids.task_ids:
-        _TaskManagerFactory._build_manager()._export(task_id, folder_path)
-    for sequence_id in entity_ids.sequence_ids:
-        _SequenceManagerFactory._build_manager()._export(sequence_id, folder_path)
-    for cycle_id in entity_ids.cycle_ids:
-        _CycleManagerFactory._build_manager()._export(cycle_id, folder_path)
-    for scenario_id in entity_ids.scenario_ids:
-        _ScenarioManagerFactory._build_manager()._export(scenario_id, folder_path)
-    for job_id in entity_ids.job_ids:
-        _JobManagerFactory._build_manager()._export(job_id, folder_path)
-    for submission_id in entity_ids.submission_ids:
-        _SubmissionManagerFactory._build_manager()._export(submission_id, folder_path)
-    _VersionManagerFactory._build_manager()._export(scenario.version, folder_path)
-
-
-def import_scenario(folder_path: Union[str, pathlib.Path], override: bool = False) -> Optional[Scenario]:
-    """Import a folder containing an exported scenario into the current Taipy application.
-
-    The folder should contain all related entities of the scenario, and all entities should
-    belong to the same version that is compatible with the current Taipy application version.
-
-    Args:
-        folder_path (Union[str, pathlib.Path]): The folder path to the scenario to import.
+            raise ExportPathAlreadyExists(str(output_zip_path), scenario_id)
+
+    with tempfile.TemporaryDirectory() as tmp_dir:
+        for data_node_id in entity_ids.data_node_ids:
+            _DataManagerFactory._build_manager()._export(data_node_id, tmp_dir, include_data=include_data)
+        for task_id in entity_ids.task_ids:
+            _TaskManagerFactory._build_manager()._export(task_id, tmp_dir)
+        for sequence_id in entity_ids.sequence_ids:
+            _SequenceManagerFactory._build_manager()._export(sequence_id, tmp_dir)
+        for cycle_id in entity_ids.cycle_ids:
+            _CycleManagerFactory._build_manager()._export(cycle_id, tmp_dir)
+        for scenario_id in entity_ids.scenario_ids:
+            _ScenarioManagerFactory._build_manager()._export(scenario_id, tmp_dir)
+        for job_id in entity_ids.job_ids:
+            _JobManagerFactory._build_manager()._export(job_id, tmp_dir)
+        for submission_id in entity_ids.submission_ids:
+            _SubmissionManagerFactory._build_manager()._export(submission_id, tmp_dir)
+        _VersionManagerFactory._build_manager()._export(scenario.version, tmp_dir)
+
+        shutil.make_archive(output_filename, "zip", tmp_dir)
+
+
+def import_scenario(input_path: Union[str, pathlib.Path], override: bool = False) -> Optional[Scenario]:
+    """Import from an archive zip file containing an exported scenario into the current Taipy application.
+
+    The zip file should be created by the `taipy.import()^` method, which contains all related entities
+    of the scenario.
+    All entities should belong to the same version that is compatible with the current Taipy application version.
+
+    Parameters:
+        input_path (Union[str, pathlib.Path]): The path to the archive scenario to import.
             If the path doesn't exist, an exception is raised.
         override (bool): If True, override the entities if existed. Default value is False.

@@ -1018,98 +1024,104 @@ def import_scenario(folder_path: Union[str, pathlib.Path], override: bool = Fals
         The imported scenario if the import is successful.

     Raises:
-        FileNotFoundError: If the import folder path does not exist.
-        ImportFolderDoesntContainAnyScenario: If the import folder doesn't contain any scenario.
+        FileNotFoundError: If the import path does not exist.
+        ImportArchiveDoesntContainAnyScenario: If the archive doesn't contain any scenario.
         ConflictedConfigurationError: If the configuration of the imported scenario is conflicted with the current one.
     """
-    if isinstance(folder_path, str):
-        folder: pathlib.Path = pathlib.Path(folder_path)
+    if isinstance(input_path, str):
+        zip_file_path: pathlib.Path = pathlib.Path(input_path)
     else:
-        folder = folder_path
-
-    if not folder.exists():
-        raise FileNotFoundError(f"The import folder '{folder_path}' does not exist.")
-
-    if not ((folder / "scenarios").exists() or (folder / "scenario").exists()):
-        raise ImportFolderDoesntContainAnyScenario(folder_path)
-
-    if not (folder / "version").exists():
-        raise ImportScenarioDoesntHaveAVersion(folder_path)
-
-    entity_managers = {
-        "cycles": _CycleManagerFactory._build_manager,
-        "cycle": _CycleManagerFactory._build_manager,
-        "data_nodes": _DataManagerFactory._build_manager,
-        "data_node": _DataManagerFactory._build_manager,
-        "tasks": _TaskManagerFactory._build_manager,
-        "task": _TaskManagerFactory._build_manager,
-        "scenarios": _ScenarioManagerFactory._build_manager,
-        "scenario": _ScenarioManagerFactory._build_manager,
-        "jobs": _JobManagerFactory._build_manager,
-        "job": _JobManagerFactory._build_manager,
-        "submission": _SubmissionManagerFactory._build_manager,
-        "version": _VersionManagerFactory._build_manager,
-    }
-
-    # Import the version to check for compatibility
-    entity_managers["version"]()._import(next((folder / "version").iterdir()), "")
-
-    valid_entity_folders = list(entity_managers.keys())
-    valid_data_folder = Config.core.storage_folder
-
-    imported_scenario = None
-    imported_entities: Dict[str, List] = {}
-
-    for entity_folder in folder.iterdir():
-        if not entity_folder.is_dir() or entity_folder.name not in valid_entity_folders + [valid_data_folder]:
-            __logger.warning(f"{entity_folder} is not a valid Taipy folder and will not be imported.")
-            continue
-
-    try:
-        for entity_type in valid_entity_folders:
-            # Skip the version folder as it is already handled
-            if entity_type == "version":
-                continue
-
-            entity_folder = folder / entity_type
-            if not entity_folder.exists():
+        zip_file_path = input_path
+
+    if not zip_file_path.exists():
+        raise FileNotFoundError(f"The import archive path '{input_path}' does not exist.")
+
+    with tempfile.TemporaryDirectory() as tmp_dir:
+        with zipfile.ZipFile(zip_file_path) as zip_file:
+            zip_file.extractall(tmp_dir)
+
+        tmp_dir_path = pathlib.Path(tmp_dir)
+
+        if not ((tmp_dir_path / "scenarios").exists() or (tmp_dir_path / "scenario").exists()):
+            raise ImportArchiveDoesntContainAnyScenario(input_path)
+
+        if not (tmp_dir_path / "version").exists():
+            raise ImportScenarioDoesntHaveAVersion(input_path)
+
+        entity_managers = {
+            "cycles": _CycleManagerFactory._build_manager,
+            "cycle": _CycleManagerFactory._build_manager,
+            "data_nodes": _DataManagerFactory._build_manager,
+            "data_node": _DataManagerFactory._build_manager,
+            "tasks": _TaskManagerFactory._build_manager,
+            "task": _TaskManagerFactory._build_manager,
+            "scenarios": _ScenarioManagerFactory._build_manager,
+            "scenario": _ScenarioManagerFactory._build_manager,
+            "jobs": _JobManagerFactory._build_manager,
+            "job": _JobManagerFactory._build_manager,
+            "submission": _SubmissionManagerFactory._build_manager,
+            "version": _VersionManagerFactory._build_manager,
+        }
+
+        # Import the version to check for compatibility
+        entity_managers["version"]()._import(next((tmp_dir_path / "version").iterdir()), "")
+
+        valid_entity_folders = list(entity_managers.keys())
+        valid_data_folder = Config.core.storage_folder
+
+        imported_scenario = None
+        imported_entities: Dict[str, List] = {}
+
+        for entity_folder in tmp_dir_path.iterdir():
+            if not entity_folder.is_dir() or entity_folder.name not in valid_entity_folders + [valid_data_folder]:
+                __logger.warning(f"{entity_folder} is not a valid Taipy folder and will not be imported.")
                 continue

-            manager = entity_managers[entity_type]()
-            imported_entities[entity_type] = []
-
-            for entity_file in entity_folder.iterdir():
-                # Check if the to-be-imported entity already exists
-                entity_id = entity_file.stem
-                if manager._exists(entity_id):
-                    if override:
-                        __logger.warning(f"{entity_id} already exists and will be overridden.")
-                    else:
-                        __logger.error(
-                            f"{entity_id} already exists. Please use the 'override' parameter to override it."
-                        )
-                        raise EntitiesToBeImportAlredyExist(folder_path)
-
-                # Import the entity
-                imported_entity = manager._import(
-                    entity_file,
-                    version=_VersionManagerFactory._build_manager()._get_latest_version(),
-                    data_folder=folder / valid_data_folder,
-                )
-
-                imported_entities[entity_type].append(imported_entity.id)
-                if entity_type in ["scenario", "scenarios"]:
-                    imported_scenario = imported_entity
-    except Exception as err:
-        __logger.error(f"An error occurred during the import: {err}. Rollback the import.")
-
-        # Rollback the import
-        for entity_type, entity_ids in list(imported_entities.items())[::-1]:
-            manager = entity_managers[entity_type]()
-            for entity_id in entity_ids:
-                if manager._exists(entity_id):
-                    manager._delete(entity_id)
-        return None
+        try:
+            for entity_type in valid_entity_folders:
+                # Skip the version folder as it is already handled
+                if entity_type == "version":
+                    continue
+
+                entity_folder = tmp_dir_path / entity_type
+                if not entity_folder.exists():
+                    continue
+
+                manager = entity_managers[entity_type]()
+                imported_entities[entity_type] = []
+
+                for entity_file in entity_folder.iterdir():
+                    # Check if the to-be-imported entity already exists
+                    entity_id = entity_file.stem
+                    if manager._exists(entity_id):
+                        if override:
+                            __logger.warning(f"{entity_id} already exists and will be overridden.")
+                        else:
+                            __logger.error(
+                                f"{entity_id} already exists. Please use the 'override' parameter to override it."
+                            )
+                            raise EntitiesToBeImportAlredyExist(input_path)
+
+                    # Import the entity
+                    imported_entity = manager._import(
+                        entity_file,
+                        version=_VersionManagerFactory._build_manager()._get_latest_version(),
+                        data_folder=tmp_dir_path / valid_data_folder,
+                    )
+
+                    imported_entities[entity_type].append(imported_entity.id)
+                    if entity_type in ["scenario", "scenarios"]:
+                        imported_scenario = imported_entity
+        except Exception as err:
+            __logger.error(f"An error occurred during the import: {err}. Rollback the import.")
+
+            # Rollback the import
+            for entity_type, entity_ids in list(imported_entities.items())[::-1]:
+                manager = entity_managers[entity_type]()
+                for entity_id in entity_ids:
+                    if manager._exists(entity_id):
+                        manager._delete(entity_id)
+            return None

     __logger.info(f"Scenario {imported_scenario.id} has been successfully imported.")  # type: ignore[union-attr]
     return imported_scenario
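
The new implementation stages entity files in a temporary directory and archives it with shutil.make_archive, while the import extracts the zip into a temporary directory with zipfile before loading entities. A standalone sketch of that round-trip pattern, with illustrative helper names and file contents (not Taipy API):

    import pathlib
    import shutil
    import tempfile
    import zipfile

    def write_and_archive(files: dict, output_filename: str) -> pathlib.Path:
        """Write a {relative_path: text} mapping into a temp dir, then zip the dir."""
        with tempfile.TemporaryDirectory() as tmp_dir:
            for relative_path, text in files.items():
                target = pathlib.Path(tmp_dir) / relative_path
                target.parent.mkdir(parents=True, exist_ok=True)
                target.write_text(text)
            # shutil.make_archive appends ".zip" to the base name it is given.
            return pathlib.Path(shutil.make_archive(output_filename, "zip", tmp_dir))

    def list_archive(zip_path: pathlib.Path) -> list:
        """Extract the archive into a temp dir and list the extracted entries."""
        with tempfile.TemporaryDirectory() as tmp_dir:
            with zipfile.ZipFile(zip_path) as zip_file:
                zip_file.extractall(tmp_dir)
            return sorted(p.name for p in pathlib.Path(tmp_dir).rglob("*"))

    archive = write_and_archive({"scenarios/s1.json": "{}"}, "demo_export")
    print(list_archive(archive))  # ['s1.json', 'scenarios']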

+ 80 - 52
tests/core/test_taipy/test_export.py

@@ -10,21 +10,23 @@
 # specific language governing permissions and limitations under the License.

 import os
-import shutil
+import zipfile

 import pandas as pd
 import pytest

 import taipy.core.taipy as tp
 from taipy import Config, Frequency, Scope
-from taipy.core.exceptions import ExportFolderAlreadyExists, InvalidExportPath
+from taipy.core.exceptions import ExportPathAlreadyExists


 @pytest.fixture(scope="function", autouse=True)
-def clean_tmp_folder():
-    shutil.rmtree("./tmp", ignore_errors=True)
+def clean_export_zip_file():
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")
     yield
-    shutil.rmtree("./tmp", ignore_errors=True)
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")


 def plus_1(x):
@@ -57,15 +59,28 @@ def configure_test_scenario(input_data, frequency=None):
     return scenario_cfg


-def test_export_scenario_to_the_storage_folder():
+def test_export_scenario_with_and_without_zip_extension(tmp_path):
     scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
+
     scenario = tp.create_scenario(scenario_cfg)
+    tp.submit(scenario)
+
+    # Export without the .zip extension should create the tmp.zip file
+    tp.export_scenario(scenario.id, f"{tmp_path}/tmp")
+    assert os.path.exists(f"{tmp_path}/tmp.zip")
+
+    os.remove(f"{tmp_path}/tmp.zip")

-    with pytest.raises(InvalidExportPath):
-        tp.export_scenario(scenario.id, Config.core.taipy_storage_folder)
+    # Export with the .zip extension should also create the tmp.zip file
+    tp.export_scenario(scenario.id, f"{tmp_path}/tmp.zip")
+    assert os.path.exists(f"{tmp_path}/tmp.zip")

+    # Export with another extension should create the tmp.<extension>.zip file
+    tp.export_scenario(scenario.id, f"{tmp_path}/tmp.tar.gz")
+    assert os.path.exists(f"{tmp_path}/tmp.tar.gz.zip")

-def test_export_scenario_with_cycle():
+
+def test_export_scenario_with_cycle(tmp_path):
     scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)

     scenario = tp.create_scenario(scenario_cfg)
@@ -73,9 +88,11 @@ def test_export_scenario_with_cycle():
     jobs = submission.jobs

     # Export the submitted scenario
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario")
+    tp.export_scenario(scenario.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path)

-    assert sorted(os.listdir("./tmp/exp_scenario/data_nodes")) == sorted(
+    assert sorted(os.listdir(f"{tmp_path}/data_nodes")) == sorted(
         [
             f"{scenario.i_1.id}.json",
             f"{scenario.o_1_csv.id}.json",
@@ -84,7 +101,7 @@ def test_export_scenario_with_cycle():
             f"{scenario.o_1_json.id}.json",
             f"{scenario.o_1_json.id}.json",
         ]
         ]
     )
     )
-    assert sorted(os.listdir("./tmp/exp_scenario/tasks")) == sorted(
+    assert sorted(os.listdir(f"{tmp_path}/tasks")) == sorted(
         [
             f"{scenario.t_1_csv.id}.json",
             f"{scenario.t_1_excel.id}.json",
@@ -92,32 +109,34 @@ def test_export_scenario_with_cycle():
             f"{scenario.t_1_json.id}.json",
             f"{scenario.t_1_json.id}.json",
         ]
         ]
     )
     )
-    assert sorted(os.listdir("./tmp/exp_scenario/scenarios")) == sorted([f"{scenario.id}.json"])
-    assert sorted(os.listdir("./tmp/exp_scenario/jobs")) == sorted(
+    assert sorted(os.listdir(f"{tmp_path}/scenarios")) == sorted([f"{scenario.id}.json"])
+    assert sorted(os.listdir(f"{tmp_path}/jobs")) == sorted(
         [f"{jobs[0].id}.json", f"{jobs[1].id}.json", f"{jobs[2].id}.json", f"{jobs[3].id}.json"]
         [f"{jobs[0].id}.json", f"{jobs[1].id}.json", f"{jobs[2].id}.json", f"{jobs[3].id}.json"]
     )
     )
-    assert os.listdir("./tmp/exp_scenario/submission") == [f"{submission.id}.json"]
-    assert sorted(os.listdir("./tmp/exp_scenario/cycles")) == sorted([f"{scenario.cycle.id}.json"])
+    assert os.listdir(f"{tmp_path}/submission") == [f"{submission.id}.json"]
+    assert sorted(os.listdir(f"{tmp_path}/cycles")) == sorted([f"{scenario.cycle.id}.json"])
 
 
 
 
-def test_export_scenario_without_cycle():
+def test_export_scenario_without_cycle(tmp_path):
     scenario_cfg = configure_test_scenario(1)

     scenario = tp.create_scenario(scenario_cfg)
     tp.submit(scenario)

     # Export the submitted scenario
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario")
+    tp.export_scenario(scenario.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path)

-    assert os.path.exists("./tmp/exp_scenario/data_nodes")
-    assert os.path.exists("./tmp/exp_scenario/tasks")
-    assert os.path.exists("./tmp/exp_scenario/scenarios")
-    assert os.path.exists("./tmp/exp_scenario/jobs")
-    assert os.path.exists("./tmp/exp_scenario/submission")
-    assert not os.path.exists("./tmp/exp_scenario/cycles")  # No cycle
+    assert os.path.exists(f"{tmp_path}/data_nodes")
+    assert os.path.exists(f"{tmp_path}/tasks")
+    assert os.path.exists(f"{tmp_path}/scenarios")
+    assert os.path.exists(f"{tmp_path}/jobs")
+    assert os.path.exists(f"{tmp_path}/submission")
+    assert not os.path.exists(f"{tmp_path}/cycles")  # No cycle
 
 
 
 
-def test_export_scenario_override_existing_files():
+def test_export_scenario_override_existing_files(tmp_path):
     scenario_1_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
     scenario_2_cfg = configure_test_scenario(2)

@@ -125,45 +144,54 @@ def test_export_scenario_override_existing_files():
     tp.submit(scenario_1)

     # Export the submitted scenario_1
-    tp.export_scenario(scenario_1.id, "./tmp/exp_scenario")
-    assert os.path.exists("./tmp/exp_scenario/data_nodes")
-    assert os.path.exists("./tmp/exp_scenario/tasks")
-    assert os.path.exists("./tmp/exp_scenario/scenarios")
-    assert os.path.exists("./tmp/exp_scenario/jobs")
-    assert os.path.exists("./tmp/exp_scenario/submission")
-    assert os.path.exists("./tmp/exp_scenario/cycles")
+    tp.export_scenario(scenario_1.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_1")
+    assert os.path.exists(f"{tmp_path}/scenario_1/data_nodes")
+    assert os.path.exists(f"{tmp_path}/scenario_1/tasks")
+    assert os.path.exists(f"{tmp_path}/scenario_1/scenarios")
+    assert os.path.exists(f"{tmp_path}/scenario_1/jobs")
+    assert os.path.exists(f"{tmp_path}/scenario_1/submission")
+    assert os.path.exists(f"{tmp_path}/scenario_1/cycles")
 
 
     scenario_2 = tp.create_scenario(scenario_2_cfg)
     scenario_2 = tp.create_scenario(scenario_2_cfg)
     tp.submit(scenario_2)
     tp.submit(scenario_2)
 
 
-    # Export the submitted scenario_2 to the same folder should raise an error
-    with pytest.raises(ExportFolderAlreadyExists):
-        tp.export_scenario(scenario_2.id, "./tmp/exp_scenario")
+    # Export the submitted scenario_2 to the same path should raise an error
+    with pytest.raises(ExportPathAlreadyExists):
+        tp.export_scenario(scenario_2.id, "tmp.zip")
 
 
     # Export the submitted scenario_2 without a cycle and override the existing files
     # Export the submitted scenario_2 without a cycle and override the existing files
-    tp.export_scenario(scenario_2.id, "./tmp/exp_scenario", override=True)
-    assert os.path.exists("./tmp/exp_scenario/data_nodes")
-    assert os.path.exists("./tmp/exp_scenario/tasks")
-    assert os.path.exists("./tmp/exp_scenario/scenarios")
-    assert os.path.exists("./tmp/exp_scenario/jobs")
-    assert os.path.exists("./tmp/exp_scenario/submission")
-    # The cycles folder should be removed when overriding
-    assert not os.path.exists("./tmp/exp_scenario/cycles")
-
-
-def test_export_scenario_filesystem_with_data():
+    tp.export_scenario(scenario_2.id, "tmp.zip", override=True)
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_2")
+    assert os.path.exists(f"{tmp_path}/scenario_2/data_nodes")
+    assert os.path.exists(f"{tmp_path}/scenario_2/tasks")
+    assert os.path.exists(f"{tmp_path}/scenario_2/scenarios")
+    assert os.path.exists(f"{tmp_path}/scenario_2/jobs")
+    assert os.path.exists(f"{tmp_path}/scenario_2/submission")
+    # The cycles folder should not exist since the new scenario does not have a cycle
+    assert not os.path.exists(f"{tmp_path}/scenario_2/cycles")
+
+
+def test_export_scenario_filesystem_with_data(tmp_path):
     scenario_cfg = configure_test_scenario(1)
     scenario = tp.create_scenario(scenario_cfg)
     tp.submit(scenario)

     # Export scenario without data
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario")
-    assert not os.path.exists("./tmp/exp_scenario/user_data")
+    tp.export_scenario(scenario.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_without_data")
+    assert not os.path.exists(f"{tmp_path}/scenario_without_data/user_data")
 
 
     # Export scenario with data
     # Export scenario with data
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario", include_data=True, override=True)
-    assert os.path.exists("./tmp/exp_scenario/user_data")
-    data_files = [f for _, _, files in os.walk("./tmp/exp_scenario/user_data") for f in files]
+    tp.export_scenario(scenario.id, "tmp.zip", include_data=True, override=True)
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_with_data")
+    assert os.path.exists(f"{tmp_path}/scenario_with_data/user_data")
+
+    data_files = [f for _, _, files in os.walk(f"{tmp_path}/scenario_with_data/user_data") for f in files]
     assert sorted(data_files) == sorted(
         [
             f"{scenario.i_1.id}.p",
@@ -188,6 +216,6 @@ def test_export_non_file_based_data_node_raise_warning(caplog):
     tp.submit(scenario)

     # Export scenario with in-memory data node
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario", include_data=True)
+    tp.export_scenario(scenario.id, "tmp.zip", include_data=True)
     expected_warning = f"Data node {scenario.o_mem.id} is not a file-based data node and the data will not be exported"
     expected_warning = f"Data node {scenario.o_mem.id} is not a file-based data node and the data will not be exported"
     assert expected_warning in caplog.text
     assert expected_warning in caplog.text

+ 63 - 56
tests/core/test_taipy/test_export_with_sql_repo.py

@@ -10,21 +10,23 @@
 # specific language governing permissions and limitations under the License.

 import os
-import shutil
+import zipfile

 import pandas as pd
 import pytest

 import taipy.core.taipy as tp
 from taipy import Config, Frequency, Scope
-from taipy.core.exceptions import ExportFolderAlreadyExists, InvalidExportPath
+from taipy.core.exceptions import ExportPathAlreadyExists


 @pytest.fixture(scope="function", autouse=True)
-def clean_tmp_folder():
-    shutil.rmtree("./tmp", ignore_errors=True)
+def clean_export_zip_file():
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")
     yield
-    shutil.rmtree("./tmp", ignore_errors=True)
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")


 def plus_1(x):
@@ -57,15 +59,7 @@ def configure_test_scenario(input_data, frequency=None):
     return scenario_cfg


-def test_export_scenario_to_the_storage_folder(init_sql_repo):
-    scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
-    scenario = tp.create_scenario(scenario_cfg)
-
-    with pytest.raises(InvalidExportPath):
-        tp.export_scenario(scenario.id, Config.core.taipy_storage_folder)
-
-
-def test_export_scenario_with_cycle(init_sql_repo):
+def test_export_scenario_with_cycle(tmp_path, init_sql_repo):
     scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)

     scenario = tp.create_scenario(scenario_cfg)
@@ -73,9 +67,11 @@ def test_export_scenario_with_cycle(init_sql_repo):
     jobs = submission.jobs

     # Export the submitted scenario
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario")
+    tp.export_scenario(scenario.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path)

-    assert sorted(os.listdir("./tmp/exp_scenario/data_node")) == sorted(
+    assert sorted(os.listdir(f"{tmp_path}/data_node")) == sorted(
         [
             f"{scenario.i_1.id}.json",
             f"{scenario.o_1_csv.id}.json",
@@ -84,7 +80,7 @@ def test_export_scenario_with_cycle(init_sql_repo):
             f"{scenario.o_1_json.id}.json",
             f"{scenario.o_1_json.id}.json",
         ]
         ]
     )
     )
-    assert sorted(os.listdir("./tmp/exp_scenario/task")) == sorted(
+    assert sorted(os.listdir(f"{tmp_path}/task")) == sorted(
         [
             f"{scenario.t_1_csv.id}.json",
             f"{scenario.t_1_excel.id}.json",
@@ -92,32 +88,34 @@ def test_export_scenario_with_cycle(init_sql_repo):
             f"{scenario.t_1_json.id}.json",
             f"{scenario.t_1_json.id}.json",
         ]
         ]
     )
     )
-    assert sorted(os.listdir("./tmp/exp_scenario/scenario")) == sorted([f"{scenario.id}.json"])
-    assert sorted(os.listdir("./tmp/exp_scenario/job")) == sorted(
+    assert sorted(os.listdir(f"{tmp_path}/scenario")) == sorted([f"{scenario.id}.json"])
+    assert sorted(os.listdir(f"{tmp_path}/job")) == sorted(
         [f"{jobs[0].id}.json", f"{jobs[1].id}.json", f"{jobs[2].id}.json", f"{jobs[3].id}.json"]
         [f"{jobs[0].id}.json", f"{jobs[1].id}.json", f"{jobs[2].id}.json", f"{jobs[3].id}.json"]
     )
     )
-    assert os.listdir("./tmp/exp_scenario/submission") == [f"{submission.id}.json"]
-    assert sorted(os.listdir("./tmp/exp_scenario/cycle")) == sorted([f"{scenario.cycle.id}.json"])
+    assert os.listdir(f"{tmp_path}/submission") == [f"{submission.id}.json"]
+    assert sorted(os.listdir(f"{tmp_path}/cycle")) == sorted([f"{scenario.cycle.id}.json"])
 
 
 
 
-def test_export_scenario_without_cycle(init_sql_repo):
+def test_export_scenario_without_cycle(tmp_path, init_sql_repo):
     scenario_cfg = configure_test_scenario(1)

     scenario = tp.create_scenario(scenario_cfg)
     tp.submit(scenario)

     # Export the submitted scenario
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario")
+    tp.export_scenario(scenario.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path)

-    assert os.path.exists("./tmp/exp_scenario/data_node")
-    assert os.path.exists("./tmp/exp_scenario/task")
-    assert os.path.exists("./tmp/exp_scenario/scenario")
-    assert os.path.exists("./tmp/exp_scenario/job")
-    assert os.path.exists("./tmp/exp_scenario/submission")
-    assert not os.path.exists("./tmp/exp_scenario/cycle")  # No cycle
+    assert os.path.exists(f"{tmp_path}/data_node")
+    assert os.path.exists(f"{tmp_path}/task")
+    assert os.path.exists(f"{tmp_path}/scenario")
+    assert os.path.exists(f"{tmp_path}/job")
+    assert os.path.exists(f"{tmp_path}/submission")
+    assert not os.path.exists(f"{tmp_path}/cycle")  # No cycle
 
 
 
 
-def test_export_scenario_override_existing_files(init_sql_repo):
+def test_export_scenario_override_existing_files(tmp_path, init_sql_repo):
     scenario_1_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
     scenario_2_cfg = configure_test_scenario(2)

@@ -125,45 +123,54 @@ def test_export_scenario_override_existing_files(init_sql_repo):
     tp.submit(scenario_1)

     # Export the submitted scenario_1
-    tp.export_scenario(scenario_1.id, "./tmp/exp_scenario")
-    assert os.path.exists("./tmp/exp_scenario/data_node")
-    assert os.path.exists("./tmp/exp_scenario/task")
-    assert os.path.exists("./tmp/exp_scenario/scenario")
-    assert os.path.exists("./tmp/exp_scenario/job")
-    assert os.path.exists("./tmp/exp_scenario/submission")
-    assert os.path.exists("./tmp/exp_scenario/cycle")
+    tp.export_scenario(scenario_1.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_1")
+    assert os.path.exists(f"{tmp_path}/scenario_1/data_node")
+    assert os.path.exists(f"{tmp_path}/scenario_1/task")
+    assert os.path.exists(f"{tmp_path}/scenario_1/scenario")
+    assert os.path.exists(f"{tmp_path}/scenario_1/job")
+    assert os.path.exists(f"{tmp_path}/scenario_1/submission")
+    assert os.path.exists(f"{tmp_path}/scenario_1/cycle")
 
 
     scenario_2 = tp.create_scenario(scenario_2_cfg)
     scenario_2 = tp.create_scenario(scenario_2_cfg)
     tp.submit(scenario_2)
     tp.submit(scenario_2)
 
 
     # Export the submitted scenario_2 to the same folder should raise an error
     # Export the submitted scenario_2 to the same folder should raise an error
-    with pytest.raises(ExportFolderAlreadyExists):
-        tp.export_scenario(scenario_2.id, "./tmp/exp_scenario")
+    with pytest.raises(ExportPathAlreadyExists):
+        tp.export_scenario(scenario_2.id, "tmp.zip")
 
 
     # Export the submitted scenario_2 without a cycle and override the existing files
     # Export the submitted scenario_2 without a cycle and override the existing files
-    tp.export_scenario(scenario_2.id, "./tmp/exp_scenario", override=True)
-    assert os.path.exists("./tmp/exp_scenario/data_node")
-    assert os.path.exists("./tmp/exp_scenario/task")
-    assert os.path.exists("./tmp/exp_scenario/scenario")
-    assert os.path.exists("./tmp/exp_scenario/job")
-    assert os.path.exists("./tmp/exp_scenario/submission")
-    # The cycles folder should be removed when overriding
-    assert not os.path.exists("./tmp/exp_scenario/cycle")
-
-
-def test_export_scenario_filesystem_with_data(init_sql_repo):
+    tp.export_scenario(scenario_2.id, "tmp.zip", override=True)
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_2")
+    assert os.path.exists(f"{tmp_path}/scenario_2/data_node")
+    assert os.path.exists(f"{tmp_path}/scenario_2/task")
+    assert os.path.exists(f"{tmp_path}/scenario_2/scenario")
+    assert os.path.exists(f"{tmp_path}/scenario_2/job")
+    assert os.path.exists(f"{tmp_path}/scenario_2/submission")
+    # The cycles folder should not exist since the new scenario does not have a cycle
+    assert not os.path.exists(f"{tmp_path}/scenario_2/cycle")
+
+
+def test_export_scenario_sql_repo_with_data(tmp_path, init_sql_repo):
     scenario_cfg = configure_test_scenario(1)
     scenario = tp.create_scenario(scenario_cfg)
     tp.submit(scenario)

     # Export scenario without data
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario")
-    assert not os.path.exists("./tmp/exp_scenario/user_data")
+    tp.export_scenario(scenario.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_without_data")
+    assert not os.path.exists(f"{tmp_path}/scenario_without_data/user_data")
 
 
     # Export scenario with data
     # Export scenario with data
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario", include_data=True, override=True)
-    assert os.path.exists("./tmp/exp_scenario/user_data")
-    data_files = [f for _, _, files in os.walk("./tmp/exp_scenario/user_data") for f in files]
+    tp.export_scenario(scenario.id, "tmp.zip", include_data=True, override=True)
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_with_data")
+    assert os.path.exists(f"{tmp_path}/scenario_with_data/user_data")
+
+    data_files = [f for _, _, files in os.walk(f"{tmp_path}/scenario_with_data/user_data") for f in files]
     assert sorted(data_files) == sorted(
         [
             f"{scenario.i_1.id}.p",
@@ -188,6 +195,6 @@ def test_export_non_file_based_data_node_raise_warning(init_sql_repo, caplog):
     tp.submit(scenario)

     # Export scenario with in-memory data node
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario", include_data=True)
+    tp.export_scenario(scenario.id, "tmp.zip", include_data=True)
     expected_warning = f"Data node {scenario.o_mem.id} is not a file-based data node and the data will not be exported"
     expected_warning = f"Data node {scenario.o_mem.id} is not a file-based data node and the data will not be exported"
     assert expected_warning in caplog.text
     assert expected_warning in caplog.text

+ 28 - 17
tests/core/test_taipy/test_import.py

@@ -11,6 +11,7 @@

 import os
 import shutil
+import zipfile

 import pandas as pd
 import pytest
@@ -22,7 +23,7 @@ from taipy.core.cycle._cycle_manager import _CycleManager
 from taipy.core.data._data_manager import _DataManager
 from taipy.core.exceptions.exceptions import (
     ConflictedConfigurationError,
-    ImportFolderDoesntContainAnyScenario,
+    ImportArchiveDoesntContainAnyScenario,
     ImportScenarioDoesntHaveAVersion,
 )
 from taipy.core.job._job_manager import _JobManager
@@ -32,10 +33,12 @@ from taipy.core.task._task_manager import _TaskManager


 @pytest.fixture(scope="function", autouse=True)
-def clean_tmp_folder():
-    shutil.rmtree("./tmp", ignore_errors=True)
+def clean_export_zip_file():
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")
     yield
-    shutil.rmtree("./tmp", ignore_errors=True)
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")


 def plus_1(x):
@@ -68,12 +71,12 @@ def configure_test_scenario(input_data, frequency=None):
     return scenario_cfg


-def export_test_scenario(scenario_cfg, folder_path="./tmp/exp_scenario", override=False, include_data=False):
+def export_test_scenario(scenario_cfg, export_path="tmp.zip", override=False, include_data=False):
     scenario = tp.create_scenario(scenario_cfg)
     tp.submit(scenario)

     # Export the submitted scenario
-    tp.export_scenario(scenario.id, folder_path, override, include_data)
+    tp.export_scenario(scenario.id, export_path, override, include_data)
     return scenario


@@ -84,7 +87,7 @@ def test_import_scenario_without_data(init_managers):
     init_managers()

     assert _ScenarioManager._get_all() == []
-    imported_scenario = tp.import_scenario("./tmp/exp_scenario")
+    imported_scenario = tp.import_scenario("tmp.zip")
 
 
     # The imported scenario should be the same as the exported scenario
     # The imported scenario should be the same as the exported scenario
     assert _ScenarioManager._get_all() == [imported_scenario]
     assert _ScenarioManager._get_all() == [imported_scenario]
@@ -106,7 +109,7 @@ def test_import_scenario_with_data(init_managers):
     init_managers()

     assert _ScenarioManager._get_all() == []
-    imported_scenario = tp.import_scenario("./tmp/exp_scenario")
+    imported_scenario = tp.import_scenario("tmp.zip")

     # All data of all data nodes should be imported
     assert all(os.path.exists(dn.path) for dn in imported_scenario.data_nodes.values())
@@ -133,7 +136,7 @@ def test_import_scenario_when_entities_are_already_existed_should_rollback(caplo
     assert len(_ScenarioManager._get_all()) == 0

     # Import the scenario when the old entities still exist
-    imported_entity = tp.import_scenario("./tmp/exp_scenario")
+    imported_entity = tp.import_scenario("tmp.zip")
     assert imported_entity is None
     assert all(log.levelname in ["ERROR", "INFO"] for log in caplog.records)
     assert "An error occurred during the import" in caplog.text
@@ -150,7 +153,7 @@ def test_import_scenario_when_entities_are_already_existed_should_rollback(caplo
     caplog.clear()

     # Import with override flag
-    tp.import_scenario("./tmp/exp_scenario", override=True)
+    tp.import_scenario("tmp.zip", override=True)
     assert all(log.levelname in ["WARNING", "INFO"] for log in caplog.records)
     assert f"{submission_id} already exists and will be overridden" in caplog.text

@@ -174,7 +177,7 @@ def test_import_incompatible_scenario(init_managers):
     Config.configure_data_node("new_dn")
     Config.configure_data_node("new_dn")
 
 
     with pytest.raises(ConflictedConfigurationError):
     with pytest.raises(ConflictedConfigurationError):
-        tp.import_scenario("./tmp/exp_scenario")
+        tp.import_scenario("tmp.zip")
 
 
 
 
 def test_import_a_non_exists_folder():
 def test_import_a_non_exists_folder():
@@ -185,17 +188,25 @@ def test_import_a_non_exists_folder():
         tp.import_scenario("non_exists_folder")
         tp.import_scenario("non_exists_folder")
 
 
 
 
-def test_import_an_empty_folder(tmpdir_factory):
+def test_import_an_empty_archive(tmpdir_factory):
     empty_folder = tmpdir_factory.mktemp("empty_folder").strpath
+    shutil.make_archive("tmp", "zip", empty_folder)

-    with pytest.raises(ImportFolderDoesntContainAnyScenario):
-        tp.import_scenario(empty_folder)
+    with pytest.raises(ImportArchiveDoesntContainAnyScenario):
+        tp.import_scenario("tmp.zip")
 
 
 
 
-def test_import_with_no_version():
+def test_import_with_no_version(tmp_path):
     scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
     export_test_scenario(scenario_cfg)
-    shutil.rmtree("./tmp/exp_scenario/version")
+
+    # Extract the zip,
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path)
+    # remove the version,
+    shutil.rmtree(f"{tmp_path}/version")
+    # and archive the scenario without the version again
+    shutil.make_archive("tmp", "zip", tmp_path)
 
 
     with pytest.raises(ImportScenarioDoesntHaveAVersion):
     with pytest.raises(ImportScenarioDoesntHaveAVersion):
-        tp.import_scenario("./tmp/exp_scenario")
+        tp.import_scenario("tmp.zip")

+ 12 - 11
tests/core/test_taipy/test_import_with_sql_repo.py

@@ -10,7 +10,6 @@
 # specific language governing permissions and limitations under the License.

 import os
-import shutil

 import pandas as pd
 import pytest
@@ -28,10 +27,12 @@ from taipy.core.task._task_manager import _TaskManager


 @pytest.fixture(scope="function", autouse=True)
-def clean_tmp_folder():
-    shutil.rmtree("./tmp", ignore_errors=True)
+def clean_export_zip_file():
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")
     yield
-    shutil.rmtree("./tmp", ignore_errors=True)
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")


 def plus_1(x):
@@ -64,12 +65,12 @@ def configure_test_scenario(input_data, frequency=None):
     return scenario_cfg


-def export_test_scenario(scenario_cfg, folder_path="./tmp/exp_scenario", override=False, include_data=False):
+def export_test_scenario(scenario_cfg, export_path="tmp.zip", override=False, include_data=False):
     scenario = tp.create_scenario(scenario_cfg)
     tp.submit(scenario)

     # Export the submitted scenario
-    tp.export_scenario(scenario.id, folder_path, override, include_data)
+    tp.export_scenario(scenario.id, export_path, override, include_data)
     return scenario


@@ -80,7 +81,7 @@ def test_import_scenario_without_data(init_sql_repo, init_managers):
     init_managers()

     assert _ScenarioManager._get_all() == []
-    imported_scenario = tp.import_scenario("./tmp/exp_scenario")
+    imported_scenario = tp.import_scenario("tmp.zip")

     # The imported scenario should be the same as the exported scenario
     assert _ScenarioManager._get_all() == [imported_scenario]
@@ -102,7 +103,7 @@ def test_import_scenario_with_data(init_sql_repo, init_managers):
     init_managers()

     assert _ScenarioManager._get_all() == []
-    imported_scenario = tp.import_scenario("./tmp/exp_scenario")
+    imported_scenario = tp.import_scenario("tmp.zip")

     # All data of all data nodes should be imported
     assert all(os.path.exists(dn.path) for dn in imported_scenario.data_nodes.values())
@@ -129,7 +130,7 @@ def test_import_scenario_when_entities_are_already_existed_should_rollback(init_
     assert len(_ScenarioManager._get_all()) == 0

     # Import the scenario when the old entities still exist
-    imported_entity = tp.import_scenario("./tmp/exp_scenario")
+    imported_entity = tp.import_scenario("tmp.zip")
     assert imported_entity is None
     assert all(log.levelname in ["ERROR", "INFO"] for log in caplog.records)
     assert "An error occurred during the import" in caplog.text
@@ -146,7 +147,7 @@ def test_import_scenario_when_entities_are_already_existed_should_rollback(init_
     caplog.clear()

     # Import with override flag
-    tp.import_scenario("./tmp/exp_scenario", override=True)
+    tp.import_scenario("tmp.zip", override=True)
     assert all(log.levelname in ["WARNING", "INFO"] for log in caplog.records)
     assert f"{submission_id} already exists and will be overridden" in caplog.text

@@ -170,4 +171,4 @@ def test_import_incompatible_scenario(init_sql_repo, init_managers):
     Config.configure_data_node("new_dn")
     Config.configure_data_node("new_dn")
 
 
     with pytest.raises(ConflictedConfigurationError):
     with pytest.raises(ConflictedConfigurationError):
-        tp.import_scenario("./tmp/exp_scenario")
+        tp.import_scenario("tmp.zip")