Explorar o código

Merge pull request #779 from Avaiga/feature/split-data-folder

Split .data folder
João André hai 1 ano
pai
achega
f217f7ba0e

+ 2 - 0
.gitignore

@@ -18,6 +18,8 @@ dist/
 
 # Core .data directory
 .data/
+.taipy/
+user_data/
 
 # demo files
 demo-*

+ 2 - 0
taipy/config/.gitignore

@@ -81,6 +81,8 @@ dataset
 
 # Filesystem default local storage
 .data/
+user_data/
+.taipy/
 
 # python notebook
 *.ipynb

+ 2 - 0
taipy/core/.gitignore

@@ -82,6 +82,8 @@ dataset
 # Filesystem default local storage
 .data/
 .my_data/
+user_data/
+.taipy/
 
 # python notebook
 *.ipynb

+ 4 - 3
taipy/core/_entity/_migrate_cli.py

@@ -13,6 +13,7 @@ import sys
 from typing import List
 
 from taipy._cli._base_cli import _CLI
+from taipy.config.config import Config
 from taipy.logger._taipy_logger import _TaipyLogger
 
 from ._migrate import (
@@ -84,7 +85,7 @@ class _MigrateCLI:
     @classmethod
     def __handle_remove_backup(cls, repository_type: str, repository_args: List):
         if repository_type == "filesystem":
-            path = repository_args[0] or ".data"
+            path = repository_args[0] or Config.core.taipy_storage_folder
             if not _remove_backup_file_entities(path):
                 sys.exit(1)
         elif repository_type == "sql":
@@ -102,7 +103,7 @@ class _MigrateCLI:
     @classmethod
     def __handle_restore_backup(cls, repository_type: str, repository_args: List):
         if repository_type == "filesystem":
-            path = repository_args[0] or ".data"
+            path = repository_args[0] or Config.core.taipy_storage_folder
             if not _restore_migrate_file_entities(path):
                 sys.exit(1)
         elif repository_type == "sql":
@@ -120,7 +121,7 @@ class _MigrateCLI:
     @classmethod
     def __migrate_entities(cls, repository_type: str, repository_args: List, do_backup: bool):
         if repository_type == "filesystem":
-            path = repository_args[0] or ".data"
+            path = repository_args[0] or Config.core.taipy_storage_folder
             if not _migrate_fs_entities(path, do_backup):
                 sys.exit(1)
 

+ 1 - 1
taipy/core/_repository/_filesystem_repository.py

@@ -52,7 +52,7 @@ class _FileSystemRepository(_AbstractRepository[ModelType, Entity]):
 
     @property
     def _storage_folder(self) -> pathlib.Path:
-        return pathlib.Path(Config.core.storage_folder)
+        return pathlib.Path(Config.core.taipy_storage_folder)
 
     ###############################
     # ##   Inherited methods   ## #

+ 33 - 7
taipy/core/config/core_section.py

@@ -28,9 +28,10 @@ class CoreSection(UniqueSection):
 
     Attributes:
         root_folder (str): Path of the base folder for the taipy application. The default value is "./taipy/"
-        storage_folder (str): Folder name used to store Taipy data. The default value is ".data/". It is used in
-            conjunction with the *root_folder* attribute. That means the storage path is <root_folder><storage_folder>
-            (The default path is "./taipy/.data/").
+        storage_folder (str): Folder name used to store user data. The default value is "user_data/". The default
+            path is "user_data/".
+        taipy_storage_folder (str): Folder name used to store Taipy data. The default value is ".taipy/". The default
+            path is ".taipy/".
         repository_type (str): Type of the repository to be used to store Taipy data. The default value is
             "filesystem".
         repository_properties (Dict[str, Union[str, int]]): A dictionary of additional properties to be used by the
@@ -54,7 +55,10 @@ class CoreSection(UniqueSection):
     _DEFAULT_ROOT_FOLDER = "./taipy/"
 
     _STORAGE_FOLDER_KEY = "storage_folder"
-    _DEFAULT_STORAGE_FOLDER = ".data/"
+    _DEFAULT_STORAGE_FOLDER = "user_data/"
+
+    _STORAGE_FOLDER_TP_KEY = "taipy_storage_folder"
+    _DEFAULT_STORAGE_FOLDER_TP = ".taipy/"
 
     _REPOSITORY_TYPE_KEY = "repository_type"
     _DEFAULT_REPOSITORY_TYPE = "filesystem"
@@ -83,6 +87,7 @@ class CoreSection(UniqueSection):
         self,
         root_folder: Optional[str] = None,
         storage_folder: Optional[str] = None,
+        taipy_storage_folder: Optional[str] = None,
         repository_type: Optional[str] = None,
         repository_properties: Optional[Dict[str, Union[str, int]]] = None,
         read_entity_retry: Optional[int] = None,
@@ -94,6 +99,7 @@ class CoreSection(UniqueSection):
     ):
         self._root_folder = root_folder
         self._storage_folder = storage_folder
+        self._taipy_storage_folder = taipy_storage_folder
         self._repository_type = repository_type
         self._repository_properties = repository_properties or {}
         self._read_entity_retry = (
@@ -112,6 +118,7 @@ class CoreSection(UniqueSection):
         return CoreSection(
             self.root_folder,
             self.storage_folder,
+            self.taipy_storage_folder,
             self.repository_type,
             self.repository_properties,
             self.read_entity_retry,
@@ -131,6 +138,15 @@ class CoreSection(UniqueSection):
     def storage_folder(self, val):
         self._storage_folder = val
 
+    @property
+    def taipy_storage_folder(self):
+        return _tpl._replace_templates(self._taipy_storage_folder)
+
+    @taipy_storage_folder.setter  # type: ignore
+    @_ConfigBlocker._check()
+    def taipy_storage_folder(self, val):
+        self._taipy_storage_folder = val
+
     @property
     def root_folder(self):
         return _tpl._replace_templates(self._root_folder)
@@ -203,6 +219,7 @@ class CoreSection(UniqueSection):
         return CoreSection(
             cls._DEFAULT_ROOT_FOLDER,
             cls._DEFAULT_STORAGE_FOLDER,
+            cls._DEFAULT_STORAGE_FOLDER_TP,
             cls._DEFAULT_REPOSITORY_TYPE,
             cls._DEFAULT_REPOSITORY_PROPERTIES,
             cls._DEFAULT_READ_ENTITY_RETRY,
@@ -215,6 +232,7 @@ class CoreSection(UniqueSection):
     def _clean(self):
         self._root_folder = self._DEFAULT_ROOT_FOLDER
         self._storage_folder = self._DEFAULT_STORAGE_FOLDER
+        self._taipy_storage_folder = self._DEFAULT_STORAGE_FOLDER_TP
         self._repository_type = self._DEFAULT_REPOSITORY_TYPE
         self._repository_properties = self._DEFAULT_REPOSITORY_PROPERTIES.copy()
         self._read_entity_retry = self._DEFAULT_READ_ENTITY_RETRY
@@ -230,6 +248,8 @@ class CoreSection(UniqueSection):
             as_dict[self._ROOT_FOLDER_KEY] = self._root_folder
         if self._storage_folder:
             as_dict[self._STORAGE_FOLDER_KEY] = self._storage_folder
+        if self._taipy_storage_folder:
+            as_dict[self._STORAGE_FOLDER_TP_KEY] = self._taipy_storage_folder
         if self._repository_type:
             as_dict[self._REPOSITORY_TYPE_KEY] = self._repository_type
         if self._repository_properties:
@@ -251,6 +271,7 @@ class CoreSection(UniqueSection):
     def _from_dict(cls, as_dict: Dict[str, Any], id=None, config: Optional[_Config] = None):
         root_folder = as_dict.pop(cls._ROOT_FOLDER_KEY, None)
         storage_folder = as_dict.pop(cls._STORAGE_FOLDER_KEY, None)
+        taipy_storage_folder = as_dict.pop(cls._STORAGE_FOLDER_TP_KEY, None)
         repository_type = as_dict.pop(cls._REPOSITORY_TYPE_KEY, None)
         repository_properties = as_dict.pop(cls._REPOSITORY_PROPERTIES_KEY, None)
         read_entity_retry = as_dict.pop(cls._READ_ENTITY_RETRY_KEY, None)
@@ -261,6 +282,7 @@ class CoreSection(UniqueSection):
         return CoreSection(
             root_folder,
             storage_folder,
+            taipy_storage_folder,
             repository_type,
             repository_properties,
             read_entity_retry,
@@ -274,6 +296,7 @@ class CoreSection(UniqueSection):
     def _update(self, as_dict: Dict[str, Any], default_section=None):
         self._root_folder = as_dict.pop(self._ROOT_FOLDER_KEY, self._root_folder)
         self._storage_folder = as_dict.pop(self._STORAGE_FOLDER_KEY, self._storage_folder)
+        self._taipy_storage_folder = as_dict.pop(self._STORAGE_FOLDER_TP_KEY, self._taipy_storage_folder)
         self._repository_type = as_dict.pop(self._REPOSITORY_TYPE_KEY, self._repository_type)
         self._repository_properties.update(as_dict.pop(self._REPOSITORY_PROPERTIES_KEY, self._repository_properties))
         self._read_entity_retry = as_dict.pop(self._READ_ENTITY_RETRY_KEY, self._read_entity_retry)
@@ -324,9 +347,12 @@ class CoreSection(UniqueSection):
         Parameters:
             root_folder (Optional[str]): Path of the base folder for the taipy application.
                 The default value is "./taipy/"
-            storage_folder (Optional[str]): Folder name used to store Taipy data. The default value is ".data/".
-                It is used in conjunction with the `root_folder` field. That means the storage path is
-                <root_folder><storage_folder> (The default path is "./taipy/.data/").
+            storage_folder (Optional[str]): Folder name used to store user data. The default value is "user_data/".
+                It is used in conjunction with the *root_folder* attribute. That means the storage path is
+                <root_folder><storage_folder> (The default path is "./taipy/user_data/").
+            taipy_storage_folder (Optional[str]): Folder name used to store Taipy data. The default value is
+                ".taipy/". It is used in conjunction with the *root_folder* attribute. That means the storage path is
+                <root_folder><taipy_storage_folder> (The default path is "./taipy/.taipy/").
             repository_type (Optional[str]): The type of the repository to be used to store Taipy data.
                 The default value is "filesystem".
             repository_properties (Optional[Dict[str, Union[str, int]]]): A dictionary of additional properties

+ 8 - 1
taipy/core/data/_abstract_file.py

@@ -8,8 +8,9 @@
 # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
-
+import os
 import pathlib
+import shutil
 
 
 class _AbstractFileDataNode(object):
@@ -26,3 +27,9 @@ class _AbstractFileDataNode(object):
         if not dir_path.exists():
             dir_path.mkdir(parents=True, exist_ok=True)
         return dir_path / f"{self.id}.{self.__EXTENSION_MAP.get(storage_type)}"
+
+    def _migrate_path(self, storage_type, old_path):
+        new_path = self._build_path(storage_type)
+        if os.path.exists(old_path):
+            shutil.move(old_path, new_path)
+        return new_path

+ 5 - 2
taipy/core/data/csv.py

@@ -124,12 +124,13 @@ class CSVDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
         _AbstractTabularDataNode.__init__(self, **properties)
 
         self._path = properties.get(self.__PATH_KEY, properties.get(self.__DEFAULT_PATH_KEY))
+        if self._path and ".data" in self._path:
+            self._path = self._migrate_path(self.storage_type(), self._path)
+
         if not self._path:
             self._path = self._build_path(self.storage_type())
         properties[self.__PATH_KEY] = self._path
 
-        if not self._last_edit_date and isfile(self._path):
-            self._last_edit_date = datetime.now()
         if default_value is not None and not os.path.exists(self._path):
             self._write(default_value)
             self._last_edit_date = datetime.now()
@@ -142,6 +143,8 @@ class CSVDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
                     }
                 )
             )
+        if not self._last_edit_date and isfile(self._path):
+            self._last_edit_date = datetime.now()
 
         self._TAIPY_PROPERTIES.update(
             {

+ 3 - 0
taipy/core/data/excel.py

@@ -126,6 +126,9 @@ class ExcelDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
             **properties,
         )
         _AbstractTabularDataNode.__init__(self, **properties)
+        if self._path and ".data" in self._path:
+            self._path = self._migrate_path(self.storage_type(), self._path)
+
         if not self._path:
             self._path = self._build_path(self.storage_type())
             properties[self.__PATH_KEY] = self._path

+ 3 - 0
taipy/core/data/json.py

@@ -111,6 +111,9 @@ class JSONDataNode(DataNode, _AbstractFileDataNode):
             **properties,
         )
         self._path = properties.get(self.__PATH_KEY, properties.get(self.__DEFAULT_PATH_KEY))
+        if self._path and ".data" in self._path:
+            self._path = self._migrate_path(self.storage_type(), self._path)
+
         if not self._path:
             self._path = self._build_path(self.storage_type())
         properties[self.__PATH_KEY] = self._path

+ 4 - 0
taipy/core/data/parquet.py

@@ -159,8 +159,12 @@ class ParquetDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode)
             **properties,
         )
         self._path = properties.get(self.__PATH_KEY, properties.get(self.__DEFAULT_PATH_KEY))
+
+        if self._path and ".data" in self._path:
+            self._path = self._migrate_path(self.storage_type(), self._path)
         if not self._path:
             self._path = self._build_path(self.storage_type())
+
         properties[self.__PATH_KEY] = self._path
 
         if default_value is not None and not os.path.exists(self._path):

+ 6 - 2
taipy/core/data/pickle.py

@@ -102,10 +102,12 @@ class PickleDataNode(DataNode, _AbstractFileDataNode):
             editor_expiration_date,
             **properties,
         )
+        if self._path and ".data" in self._path:
+            self._path = self._migrate_path(self.storage_type(), self._path)
+
         if self._path is None:
             self._path = self._build_path(self.storage_type())
-        if not self._last_edit_date and os.path.exists(self._path):
-            self._last_edit_date = datetime.now()
+
         if default_value is not None and not os.path.exists(self._path):
             self._write(default_value)
             self._last_edit_date = datetime.now()
@@ -118,6 +120,8 @@ class PickleDataNode(DataNode, _AbstractFileDataNode):
                     }
                 )
             )
+        if not self._last_edit_date and os.path.exists(self._path):
+            self._last_edit_date = datetime.now()
 
         self._TAIPY_PROPERTIES.update(
             {

+ 2 - 0
taipy/gui/.gitignore

@@ -128,6 +128,8 @@ Dockerfile.dev
 
 # Filesystem default local storage
 .data/
+user_data/
+.taipy/
 
 # Python notebook
 *.ipynb

+ 2 - 0
taipy/templates/.gitignore

@@ -18,6 +18,8 @@ dist/
 
 # Core .data directory
 .data/
+user_data/
+.taipy/
 
 # demo files
 demo-*

+ 3 - 1
taipy/templates/scenario-management/{{cookiecutter.__root_folder_name}}/.gitignore

@@ -1,2 +1,4 @@
 # Taipy
-.data
+.data/
+user_data/
+.taipy/

+ 1 - 1
tests/core/_entity/test_migrate_cli.py

@@ -36,7 +36,7 @@ def test_migrate_fs_default(caplog):
     with pytest.raises(SystemExit):
         with patch("sys.argv", ["prog", "migrate", "--repository-type", "filesystem", "--skip-backup"]):
             _MigrateCLI.parse_arguments()
-    assert "Starting entity migration from '.data' folder" in caplog.text
+    assert "Starting entity migration from '.taipy/' folder" in caplog.text
 
 
 def test_migrate_fs_specified_folder(caplog):

+ 4 - 2
tests/core/config/test_config_serialization.py

@@ -117,7 +117,8 @@ max_nb_of_workers = "1:int"
 
 [CORE]
 root_folder = "./taipy/"
-storage_folder = ".data/"
+storage_folder = "user_data/"
+taipy_storage_folder = ".taipy/"
 repository_type = "filesystem"
 read_entity_retry = "0:int"
 mode = "development"
@@ -289,7 +290,8 @@ def test_read_write_json_configuration_file():
 },
 "CORE": {
 "root_folder": "./taipy/",
-"storage_folder": ".data/",
+"storage_folder": "user_data/",
+"taipy_storage_folder": ".taipy/",
 "repository_type": "filesystem",
 "read_entity_retry": "0:int",
 "mode": "development",

+ 2 - 1
tests/core/config/test_default_config.py

@@ -31,7 +31,8 @@ def _test_default_core_section(core_section: CoreSection):
     assert core_section.version_number == ""
     assert not core_section.force
     assert core_section.root_folder == "./taipy/"
-    assert core_section.storage_folder == ".data/"
+    assert core_section.storage_folder == "user_data/"
+    assert core_section.taipy_storage_folder == ".taipy/"
     assert core_section.repository_type == "filesystem"
     assert core_section.repository_properties == {}
     assert len(core_section.properties) == 0

+ 2 - 1
tests/core/config/test_file_config.py

@@ -31,7 +31,8 @@ max_nb_of_workers = "2:int"
 
 [CORE]
 root_folder = "./taipy/"
-storage_folder = ".data/"
+storage_folder = "user_data/"
+taipy_storage_folder = ".taipy/"
 repository_type = "filesystem"
 read_entity_retry = "0:int"
 mode = "development"

+ 4 - 0
tests/core/conftest.py

@@ -182,6 +182,10 @@ def cleanup_files():
 
     if os.path.exists(".data"):
         shutil.rmtree(".data", ignore_errors=True)
+    if os.path.exists("user_data"):
+        shutil.rmtree("user_data", ignore_errors=True)
+    if os.path.exists(".taipy"):
+        shutil.rmtree(".taipy", ignore_errors=True)
     if os.path.exists(".my_data"):
         shutil.rmtree(".my_data", ignore_errors=True)
 

+ 13 - 0
tests/core/data/test_csv_data_node.py

@@ -154,3 +154,16 @@ class TestCSVDataNode:
         dn.write(pd.DataFrame([7, 8, 9]))
         assert new_edit_date < dn.last_edit_date
         os.unlink(temp_file_path)
+
+    def test_migrate_to_new_path(self, tmp_path):
+        _base_path = os.path.join(tmp_path, ".data")
+        path = os.path.join(_base_path, "test.csv")
+        # create a file on old path
+        os.mkdir(_base_path)
+        with open(path, "w"):
+            pass
+
+        dn = CSVDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
+
+        assert ".data" not in dn.path.name
+        assert os.path.exists(dn.path)

+ 13 - 0
tests/core/data/test_excel_data_node.py

@@ -350,3 +350,16 @@ class TestExcelDataNode:
         dn.write(pd.DataFrame([7, 8, 9]))
         assert new_edit_date < dn.last_edit_date
         os.unlink(temp_file_path)
+
+    def test_migrate_to_new_path(self, tmp_path):
+        _base_path = os.path.join(tmp_path, ".data")
+        path = os.path.join(_base_path, "test.xlsx")
+        # create a file on old path
+        os.mkdir(_base_path)
+        with open(path, "w"):
+            pass
+
+        dn = ExcelDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
+
+        assert ".data" not in dn.path.name
+        assert os.path.exists(dn.path)

+ 13 - 0
tests/core/data/test_json_data_node.py

@@ -355,3 +355,16 @@ class TestJSONDataNode:
         dn.write([1, 2, 3])
         assert new_edit_date < dn.last_edit_date
         os.unlink(temp_file_path)
+
+    def test_migrate_to_new_path(self, tmp_path):
+        _base_path = os.path.join(tmp_path, ".data")
+        path = os.path.join(_base_path, "test.json")
+        # create a file on old path
+        os.mkdir(_base_path)
+        with open(path, "w"):
+            pass
+
+        dn = JSONDataNode("foo", Scope.SCENARIO, properties={"path": path})
+
+        assert ".data" not in dn.path.name
+        assert os.path.exists(dn.path)

+ 13 - 0
tests/core/data/test_parquet_data_node.py

@@ -533,3 +533,16 @@ class TestParquetDataNode:
         path = "data/node/path"
         dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path})
         assert dn.read_with_kwargs() is None
+
+    def test_migrate_to_new_path(self, tmp_path):
+        _base_path = os.path.join(tmp_path, ".data")
+        path = os.path.join(_base_path, "test.parquet")
+        # create a file on old path
+        os.mkdir(_base_path)
+        with open(path, "w"):
+            pass
+
+        dn = ParquetDataNode("foo_bar", Scope.SCENARIO, properties={"path": path, "name": "super name"})
+
+        assert ".data" not in dn.path.name
+        assert os.path.exists(dn.path)

+ 13 - 0
tests/core/data/test_pickle_data_node.py

@@ -179,3 +179,16 @@ class TestPickleDataNodeEntity:
         dn.write(pd.DataFrame([7, 8, 9]))
         assert new_edit_date < dn.last_edit_date
         os.unlink(temp_file_path)
+
+    def test_migrate_to_new_path(self, tmp_path):
+        _base_path = os.path.join(tmp_path, ".data")
+        path = os.path.join(_base_path, "test.p")
+        # create a file on old path
+        os.mkdir(_base_path)
+        with open(path, "w"):
+            pass
+
+        dn = PickleDataNode("foo", Scope.SCENARIO, properties={"default_data": "bar", "path": path})
+
+        assert ".data" not in dn.path.name
+        assert os.path.exists(dn.path)

+ 1 - 1
tests/core/test_taipy.py

@@ -713,7 +713,7 @@ class TestTaipy:
         assert sorted(os.listdir("./tmp/exp_scenario_1/cycles")) == sorted([f"{scenario_2.cycle.id}.json"])
 
         with pytest.raises(InvalidExportPath):
-            tp.export_scenario(scenario_2.id, Config.core.storage_folder)
+            tp.export_scenario(scenario_2.id, Config.core.taipy_storage_folder)
 
         shutil.rmtree("./tmp", ignore_errors=True)