瀏覽代碼

merge develop

namnguyen 10 月之前
父節點
當前提交
ed3c3d1526
共有 95 個文件被更改,包括 1577 次插入和 5183 次刪除
  1. 83 0
      .github/workflows/prebuild.yml
  2. 1 1
      README.md
  3. 15 6
      doc/gui/examples/controls/text-format.py
  4. 35 0
      doc/gui/examples/controls/text-md.py
  5. 21 6
      doc/gui/examples/controls/text-pre.py
  6. 15 3
      doc/gui/examples/controls/text-simple.py
  7. 219 236
      frontend/taipy-gui/package-lock.json
  8. 1 0
      frontend/taipy-gui/packaging/taipy-gui.d.ts
  9. 16 11
      frontend/taipy-gui/src/components/Taipy/Chart.tsx
  10. 40 14
      frontend/taipy-gui/src/components/Taipy/Dialog.spec.tsx
  11. 17 5
      frontend/taipy-gui/src/components/Taipy/TableFilter.spec.tsx
  12. 2 0
      frontend/taipy-gui/src/components/Taipy/TableFilter.tsx
  13. 22 21
      frontend/taipy-gui/src/context/taipyReducers.spec.ts
  14. 179 173
      frontend/taipy/package-lock.json
  15. 6 6
      frontend/taipy/src/CoreSelector.tsx
  16. 22 5
      frontend/taipy/src/ScenarioViewer.tsx
  17. 5 5
      taipy/_cli/_create_cli.py
  18. 4 4
      taipy/_entrypoint.py
  19. 0 1
      taipy/core/_entity/_migrate/__init__.py
  20. 0 205
      taipy/core/_entity/_migrate/_migrate_sql.py
  21. 2 15
      taipy/core/_entity/_migrate_cli.py
  22. 0 4
      taipy/core/_repository/_base_taipy_model.py
  23. 0 236
      taipy/core/_repository/_sql_repository.py
  24. 0 10
      taipy/core/_repository/db/__init__.py
  25. 0 77
      taipy/core/_repository/db/_sql_connection.py
  26. 1 2
      taipy/core/_version/_version_manager_factory.py
  27. 1 25
      taipy/core/_version/_version_model.py
  28. 0 84
      taipy/core/_version/_version_sql_repository.py
  29. 2 5
      taipy/core/cycle/_cycle_manager_factory.py
  30. 0 15
      taipy/core/cycle/_cycle_model.py
  31. 2 5
      taipy/core/data/_data_manager_factory.py
  32. 0 25
      taipy/core/data/_data_model.py
  33. 2 5
      taipy/core/job/_job_manager_factory.py
  34. 0 18
      taipy/core/job/_job_model.py
  35. 0 18
      taipy/core/job/_job_sql_repository.py
  36. 4 5
      taipy/core/scenario/_scenario_manager.py
  37. 1 2
      taipy/core/scenario/_scenario_manager_factory.py
  38. 0 20
      taipy/core/scenario/_scenario_model.py
  39. 0 18
      taipy/core/scenario/_scenario_sql_repository.py
  40. 1 2
      taipy/core/submission/_submission_manager_factory.py
  41. 0 23
      taipy/core/submission/_submission_model.py
  42. 0 18
      taipy/core/submission/_submission_sql_repository.py
  43. 2 2
      taipy/core/taipy.py
  44. 2 5
      taipy/core/task/_task_manager_factory.py
  45. 0 19
      taipy/core/task/_task_model.py
  46. 0 18
      taipy/core/task/_task_sql_repository.py
  47. 2 0
      taipy/gui/.gitignore
  48. 2 2
      taipy/gui/__init__.py
  49. 22 11
      taipy/gui/_renderers/builder.py
  50. 1 1
      taipy/gui/builder/_utils.py
  51. 20 13
      taipy/gui/data/array_dict_data_accessor.py
  52. 2 2
      taipy/gui/data/comparison.py
  53. 58 41
      taipy/gui/data/data_accessor.py
  54. 1 0
      taipy/gui/data/data_format.py
  55. 7 16
      taipy/gui/data/numpy_data_accessor.py
  56. 96 68
      taipy/gui/data/pandas_data_accessor.py
  57. 27 34
      taipy/gui/gui.py
  58. 2 2
      taipy/gui/icon.py
  59. 3 3
      taipy/gui/partial.py
  60. 1 0
      taipy/gui/types.py
  61. 6 6
      taipy/gui/viselements.json
  62. 2 2
      taipy/gui_core/_GuiCoreLib.py
  63. 1 0
      taipy/gui_core/__init__.py
  64. 153 106
      taipy/gui_core/_adapters.py
  65. 29 12
      taipy/gui_core/_context.py
  66. 8 8
      taipy/gui_core/viselements.json
  67. 2 2
      taipy/templates/README.md
  68. 2 2
      taipy/templates/package_desc.md
  69. 1 1
      tests/cli/test_help_cli.py
  70. 0 99
      tests/core/_entity/test_migrate_cli.py
  71. 0 16
      tests/core/conftest.py
  72. 0 247
      tests/core/cycle/test_cycle_manager_with_sql_repo.py
  73. 19 29
      tests/core/cycle/test_cycle_repositories.py
  74. 0 288
      tests/core/data/test_data_manager_with_sql_repo.py
  75. 20 21
      tests/core/data/test_data_repositories.py
  76. 0 255
      tests/core/job/test_job_manager_with_sql_repo.py
  77. 41 52
      tests/core/job/test_job_repositories.py
  78. 1 28
      tests/core/repository/mocks.py
  79. 59 0
      tests/core/repository/test_base_model.py
  80. 8 15
      tests/core/repository/test_repositories.py
  81. 10 1
      tests/core/scenario/test_scenario_manager.py
  82. 0 490
      tests/core/scenario/test_scenario_manager_with_sql_repo.py
  83. 21 32
      tests/core/scenario/test_scenario_repositories.py
  84. 0 274
      tests/core/sequence/test_sequence_manager_with_sql_repo.py
  85. 0 180
      tests/core/submission/test_submission_manager_with_sql_repo.py
  86. 9 45
      tests/core/submission/test_submission_repositories.py
  87. 0 389
      tests/core/task/test_task_manager_with_sql_repo.py
  88. 29 55
      tests/core/task/test_task_repositories.py
  89. 0 567
      tests/core/test_core_cli_with_sql_repo.py
  90. 0 306
      tests/core/version/test_version_cli_with_sql_repo.py
  91. 19 29
      tests/core/version/test_version_repositories.py
  92. 80 23
      tests/gui/data/test_array_dict_data_accessor.py
  93. 86 28
      tests/gui/data/test_pandas_data_accessor.py
  94. 3 3
      tests/templates/test_template_cli.py
  95. 1 1
      tools/gui/generate_pyi.py

+ 83 - 0
.github/workflows/prebuild.yml

@@ -0,0 +1,83 @@
+name: Generate prebuild for develop branch
+
+on:
+  push:
+    branches: [develop]
+  workflow_dispatch:
+
+permissions:
+  contents: write
+
+env:
+  NODE_OPTIONS: --max-old-space-size=4096
+
+jobs:
+  reset-prebuild:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Reset prebuild branch to develop
+        uses: nicksnell/action-reset-repo@master
+        with:
+          base_branch: develop
+          reset_branch: prebuild
+
+  prebuild:
+    needs: reset-prebuild
+    timeout-minutes: 20
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: develop
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: "3.11"
+
+      - name: Hash frontend source code
+        id: hash-frontend
+        run: |
+          python tools/frontend/hash_source.py
+          echo "HASH=$(cat hash.txt)" >> $GITHUB_OUTPUT
+          rm hash.txt
+        shell: bash
+
+      - name: Restore cached frontend build
+        id: cache-fe-build
+        uses: actions/cache@v4
+        with:
+          path: |
+            taipy/gui/webapp
+            taipy/gui_core/lib
+          key: frontend-build-${{ runner.os }}-${{ steps.hash-frontend.outputs.HASH }}
+
+      - name: Setup node
+        if: steps.cache-fe-build.outputs.cache-hit != 'true'
+        uses: actions/setup-node@v4
+        with:
+          node-version: "20"
+          cache: "npm"
+          cache-dependency-path: "**/package-lock.json"
+
+      - name: Frontend Bundle Build
+        if: steps.cache-fe-build.outputs.cache-hit != 'true'
+        run: python tools/frontend/bundle_build.py
+
+      - name: Reset npm dependencies
+        if: steps.cache-fe-build.outputs.cache-hit != 'true'
+        run: |
+          git checkout -- frontend/taipy/package.json
+          git checkout -- frontend/taipy/package-lock.json
+          git checkout -- frontend/taipy-gui/package-lock.json
+
+      - name: Add frontend build force to bypass gitignore
+        run: |
+          git add -f taipy/gui/webapp
+          git add -f taipy/gui_core/lib
+
+      - name: Commit prebuild changes
+        uses: stefanzweifel/git-auto-commit-action@v5
+        with:
+          branch: prebuild
+          add_options: "-u"
+          push_options: "--force"

+ 1 - 1
README.md

@@ -112,7 +112,7 @@ Your configuration is automatically saved as a TOML file.<br />
 Check out Taipy Studio [Documentation](https://docs.taipy.io/en/latest/manuals/studio/)
 
 For more advanced use cases or if you prefer coding your configurations instead of using Taipy Studio,<br />
-Check out the movie genre demo scenario creation with this [Demo](https://docs.taipy.io/en/latest/knowledge_base/demos/movie_genre_selector/).
+Check out the movie genre demo scenario creation with this [Demo](https://docs.taipy.io/en/latest/gallery/other/movie_genre_selector/).
 
 ![TaipyStudio](https://github.com/Avaiga/taipy/raw/develop/readme_img/readme_demo_studio.gif)
 

+ 15 - 6
taipy/core/cycle/_cycle_sql_repository.py → doc/gui/examples/controls/text-format.py

@@ -8,11 +8,20 @@
 # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
-from .._repository._sql_repository import _SQLRepository
-from ._cycle_converter import _CycleConverter
-from ._cycle_model import _CycleModel
+# -----------------------------------------------------------------------------------------
+# To execute this script, make sure that the taipy-gui package is installed in your
+# Python environment and run:
+#     python <script>
+# -----------------------------------------------------------------------------------------
+from taipy.gui import Gui
 
+pi = 3.14159265358979
 
-class _CycleSQLRepository(_SQLRepository):
-    def __init__(self) -> None:
-        super().__init__(model_type=_CycleModel, converter=_CycleConverter)
+page = """
+# Text - Formatting
+<|toggle|theme|>
+
+π≈<|{pi}|text|format=%.3f|>
+"""
+
+Gui(page).run()

+ 35 - 0
doc/gui/examples/controls/text-md.py

@@ -0,0 +1,35 @@
+# Copyright 2021-2024 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+# -----------------------------------------------------------------------------------------
+# To execute this script, make sure that the taipy-gui package is installed in your
+# Python environment and run:
+#     python <script>
+# -----------------------------------------------------------------------------------------
+from taipy.gui import Gui
+
+markdown = """
+# Generated by *Taipy*
+
+You can insert *Markdown* code in a `text` control to
+add style to the text.
+
+If a line ends with two white spaces, such as here
+then you can create line skips.
+""" # noqa W291
+
+page = """
+# Text - Markdown
+<|toggle|theme|>
+
+<|{markdown}|text|mode=markdown|>
+"""
+
+Gui(page).run()

+ 21 - 6
taipy/core/data/_data_sql_repository.py → doc/gui/examples/controls/text-pre.py

@@ -8,11 +8,26 @@
 # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
-from .._repository._sql_repository import _SQLRepository
-from ._data_converter import _DataNodeConverter
-from ._data_model import _DataNodeModel
+# -----------------------------------------------------------------------------------------
+# To execute this script, make sure that the taipy-gui package is installed in your
+# Python environment and run:
+#     python <script>
+# -----------------------------------------------------------------------------------------
+from taipy.gui import Gui
 
+code = """
+def say_hello(name: str):
+    print(f"Hello, {name}!")
 
-class _DataSQLRepository(_SQLRepository):
-    def __init__(self) -> None:
-        super().__init__(model_type=_DataNodeModel, converter=_DataNodeConverter)
+if __name__ == "__main__":
+    say_hello("Taipy")
+"""
+
+page = """
+# Text - pre
+<|toggle|theme|>
+
+<|{code}|text|mode=pre|>
+"""
+
+Gui(page).run()

+ 15 - 3
taipy/core/_repository/db/_sql_base_model.py → doc/gui/examples/controls/text-simple.py

@@ -8,8 +8,20 @@
 # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
+# -----------------------------------------------------------------------------------------
+# To execute this script, make sure that the taipy-gui package is installed in your
+# Python environment and run:
+#     python <script>
+# -----------------------------------------------------------------------------------------
+from taipy.gui import Gui
 
-from sqlalchemy.orm import declarative_base, registry
+name = "Taipy"
 
-_SQLBaseModel = declarative_base()
-mapper_registry = registry()
+page = """
+# Text - simple
+<|toggle|theme|>
+
+<|Hello {name}!|>
+"""
+
+Gui(page).run()

+ 219 - 236
frontend/taipy-gui/package-lock.json

@@ -656,15 +656,15 @@
       }
     },
     "node_modules/@emotion/babel-plugin": {
-      "version": "11.11.0",
-      "resolved": "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz",
-      "integrity": "sha512-m4HEDZleaaCH+XgDDsPF15Ht6wTLsgDTeR3WYj9Q/k76JtWhrJjcP4+/XlG8LGT/Rol9qUfOIztXeA84ATpqPQ==",
+      "version": "11.12.0",
+      "resolved": "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.12.0.tgz",
+      "integrity": "sha512-y2WQb+oP8Jqvvclh8Q55gLUyb7UFvgv7eJfsj7td5TToBrIUtPay2kMrZi4xjq9qw2vD0ZR5fSho0yqoFgX7Rw==",
       "dependencies": {
         "@babel/helper-module-imports": "^7.16.7",
         "@babel/runtime": "^7.18.3",
-        "@emotion/hash": "^0.9.1",
-        "@emotion/memoize": "^0.8.1",
-        "@emotion/serialize": "^1.1.2",
+        "@emotion/hash": "^0.9.2",
+        "@emotion/memoize": "^0.9.0",
+        "@emotion/serialize": "^1.2.0",
         "babel-plugin-macros": "^3.1.0",
         "convert-source-map": "^1.5.0",
         "escape-string-regexp": "^4.0.0",
@@ -674,47 +674,47 @@
       }
     },
     "node_modules/@emotion/cache": {
-      "version": "11.11.0",
-      "resolved": "https://registry.npmjs.org/@emotion/cache/-/cache-11.11.0.tgz",
-      "integrity": "sha512-P34z9ssTCBi3e9EI1ZsWpNHcfY1r09ZO0rZbRO2ob3ZQMnFI35jB536qoXbkdesr5EUhYi22anuEJuyxifaqAQ==",
-      "dependencies": {
-        "@emotion/memoize": "^0.8.1",
-        "@emotion/sheet": "^1.2.2",
-        "@emotion/utils": "^1.2.1",
-        "@emotion/weak-memoize": "^0.3.1",
+      "version": "11.13.0",
+      "resolved": "https://registry.npmjs.org/@emotion/cache/-/cache-11.13.0.tgz",
+      "integrity": "sha512-hPV345J/tH0Cwk2wnU/3PBzORQ9HeX+kQSbwI+jslzpRCHE6fSGTohswksA/Ensr8znPzwfzKZCmAM9Lmlhp7g==",
+      "dependencies": {
+        "@emotion/memoize": "^0.9.0",
+        "@emotion/sheet": "^1.4.0",
+        "@emotion/utils": "^1.4.0",
+        "@emotion/weak-memoize": "^0.4.0",
         "stylis": "4.2.0"
       }
     },
     "node_modules/@emotion/hash": {
-      "version": "0.9.1",
-      "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.1.tgz",
-      "integrity": "sha512-gJB6HLm5rYwSLI6PQa+X1t5CFGrv1J1TWG+sOyMCeKz2ojaj6Fnl/rZEspogG+cvqbt4AE/2eIyD2QfLKTBNlQ=="
+      "version": "0.9.2",
+      "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.2.tgz",
+      "integrity": "sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g=="
     },
     "node_modules/@emotion/is-prop-valid": {
-      "version": "1.2.2",
-      "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.2.2.tgz",
-      "integrity": "sha512-uNsoYd37AFmaCdXlg6EYD1KaPOaRWRByMCYzbKUX4+hhMfrxdVSelShywL4JVaAeM/eHUOSprYBQls+/neX3pw==",
+      "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.3.0.tgz",
+      "integrity": "sha512-SHetuSLvJDzuNbOdtPVbq6yMMMlLoW5Q94uDqJZqy50gcmAjxFkVqmzqSGEFq9gT2iMuIeKV1PXVWmvUhuZLlQ==",
       "dependencies": {
-        "@emotion/memoize": "^0.8.1"
+        "@emotion/memoize": "^0.9.0"
       }
     },
     "node_modules/@emotion/memoize": {
-      "version": "0.8.1",
-      "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.8.1.tgz",
-      "integrity": "sha512-W2P2c/VRW1/1tLox0mVUalvnWXxavmv/Oum2aPsRcoDJuob75FC3Y8FbpfLwUegRcxINtGUMPq0tFCvYNTBXNA=="
+      "version": "0.9.0",
+      "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.9.0.tgz",
+      "integrity": "sha512-30FAj7/EoJ5mwVPOWhAyCX+FPfMDrVecJAM+Iw9NRoSl4BBAQeqj4cApHHUXOVvIPgLVDsCFoz/hGD+5QQD1GQ=="
     },
     "node_modules/@emotion/react": {
-      "version": "11.11.4",
-      "resolved": "https://registry.npmjs.org/@emotion/react/-/react-11.11.4.tgz",
-      "integrity": "sha512-t8AjMlF0gHpvvxk5mAtCqR4vmxiGHCeJBaQO6gncUSdklELOgtwjerNY2yuJNfwnc6vi16U/+uMF+afIawJ9iw==",
+      "version": "11.13.0",
+      "resolved": "https://registry.npmjs.org/@emotion/react/-/react-11.13.0.tgz",
+      "integrity": "sha512-WkL+bw1REC2VNV1goQyfxjx1GYJkcc23CRQkXX+vZNLINyfI7o+uUn/rTGPt/xJ3bJHd5GcljgnxHf4wRw5VWQ==",
       "dependencies": {
         "@babel/runtime": "^7.18.3",
-        "@emotion/babel-plugin": "^11.11.0",
-        "@emotion/cache": "^11.11.0",
-        "@emotion/serialize": "^1.1.3",
-        "@emotion/use-insertion-effect-with-fallbacks": "^1.0.1",
-        "@emotion/utils": "^1.2.1",
-        "@emotion/weak-memoize": "^0.3.1",
+        "@emotion/babel-plugin": "^11.12.0",
+        "@emotion/cache": "^11.13.0",
+        "@emotion/serialize": "^1.3.0",
+        "@emotion/use-insertion-effect-with-fallbacks": "^1.1.0",
+        "@emotion/utils": "^1.4.0",
+        "@emotion/weak-memoize": "^0.4.0",
         "hoist-non-react-statics": "^3.3.1"
       },
       "peerDependencies": {
@@ -727,33 +727,33 @@
       }
     },
     "node_modules/@emotion/serialize": {
-      "version": "1.1.4",
-      "resolved": "https://registry.npmjs.org/@emotion/serialize/-/serialize-1.1.4.tgz",
-      "integrity": "sha512-RIN04MBT8g+FnDwgvIUi8czvr1LU1alUMI05LekWB5DGyTm8cCBMCRpq3GqaiyEDRptEXOyXnvZ58GZYu4kBxQ==",
+      "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/@emotion/serialize/-/serialize-1.3.0.tgz",
+      "integrity": "sha512-jACuBa9SlYajnpIVXB+XOXnfJHyckDfe6fOpORIM6yhBDlqGuExvDdZYHDQGoDf3bZXGv7tNr+LpLjJqiEQ6EA==",
       "dependencies": {
-        "@emotion/hash": "^0.9.1",
-        "@emotion/memoize": "^0.8.1",
-        "@emotion/unitless": "^0.8.1",
-        "@emotion/utils": "^1.2.1",
+        "@emotion/hash": "^0.9.2",
+        "@emotion/memoize": "^0.9.0",
+        "@emotion/unitless": "^0.9.0",
+        "@emotion/utils": "^1.4.0",
         "csstype": "^3.0.2"
       }
     },
     "node_modules/@emotion/sheet": {
-      "version": "1.2.2",
-      "resolved": "https://registry.npmjs.org/@emotion/sheet/-/sheet-1.2.2.tgz",
-      "integrity": "sha512-0QBtGvaqtWi+nx6doRwDdBIzhNdZrXUppvTM4dtZZWEGTXL/XE/yJxLMGlDT1Gt+UHH5IX1n+jkXyytE/av7OA=="
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/@emotion/sheet/-/sheet-1.4.0.tgz",
+      "integrity": "sha512-fTBW9/8r2w3dXWYM4HCB1Rdp8NLibOw2+XELH5m5+AkWiL/KqYX6dc0kKYlaYyKjrQ6ds33MCdMPEwgs2z1rqg=="
     },
     "node_modules/@emotion/styled": {
-      "version": "11.11.5",
-      "resolved": "https://registry.npmjs.org/@emotion/styled/-/styled-11.11.5.tgz",
-      "integrity": "sha512-/ZjjnaNKvuMPxcIiUkf/9SHoG4Q196DRl1w82hQ3WCsjo1IUR8uaGWrC6a87CrYAW0Kb/pK7hk8BnLgLRi9KoQ==",
+      "version": "11.13.0",
+      "resolved": "https://registry.npmjs.org/@emotion/styled/-/styled-11.13.0.tgz",
+      "integrity": "sha512-tkzkY7nQhW/zC4hztlwucpT8QEZ6eUzpXDRhww/Eej4tFfO0FxQYWRyg/c5CCXa4d/f174kqeXYjuQRnhzf6dA==",
       "dependencies": {
         "@babel/runtime": "^7.18.3",
-        "@emotion/babel-plugin": "^11.11.0",
-        "@emotion/is-prop-valid": "^1.2.2",
-        "@emotion/serialize": "^1.1.4",
-        "@emotion/use-insertion-effect-with-fallbacks": "^1.0.1",
-        "@emotion/utils": "^1.2.1"
+        "@emotion/babel-plugin": "^11.12.0",
+        "@emotion/is-prop-valid": "^1.3.0",
+        "@emotion/serialize": "^1.3.0",
+        "@emotion/use-insertion-effect-with-fallbacks": "^1.1.0",
+        "@emotion/utils": "^1.4.0"
       },
       "peerDependencies": {
         "@emotion/react": "^11.0.0-rc.0",
@@ -766,27 +766,27 @@
       }
     },
     "node_modules/@emotion/unitless": {
-      "version": "0.8.1",
-      "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.8.1.tgz",
-      "integrity": "sha512-KOEGMu6dmJZtpadb476IsZBclKvILjopjUii3V+7MnXIQCYh8W3NgNcgwo21n9LXZX6EDIKvqfjYxXebDwxKmQ=="
+      "version": "0.9.0",
+      "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.9.0.tgz",
+      "integrity": "sha512-TP6GgNZtmtFaFcsOgExdnfxLLpRDla4Q66tnenA9CktvVSdNKDvMVuUah4QvWPIpNjrWsGg3qeGo9a43QooGZQ=="
     },
     "node_modules/@emotion/use-insertion-effect-with-fallbacks": {
-      "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.0.1.tgz",
-      "integrity": "sha512-jT/qyKZ9rzLErtrjGgdkMBn2OP8wl0G3sQlBb3YPryvKHsjvINUhVaPFfP+fpBcOkmrVOVEEHQFJ7nbj2TH2gw==",
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.1.0.tgz",
+      "integrity": "sha512-+wBOcIV5snwGgI2ya3u99D7/FJquOIniQT1IKyDsBmEgwvpxMNeS65Oib7OnE2d2aY+3BU4OiH+0Wchf8yk3Hw==",
       "peerDependencies": {
         "react": ">=16.8.0"
       }
     },
     "node_modules/@emotion/utils": {
-      "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/@emotion/utils/-/utils-1.2.1.tgz",
-      "integrity": "sha512-Y2tGf3I+XVnajdItskUCn6LX+VUDmP6lTL4fcqsXAv43dnlbZiuW4MWQW38rW/BVWSE7Q/7+XQocmpnRYILUmg=="
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/@emotion/utils/-/utils-1.4.0.tgz",
+      "integrity": "sha512-spEnrA1b6hDR/C68lC2M7m6ALPUHZC0lIY7jAS/B/9DuuO1ZP04eov8SMv/6fwRd8pzmsn2AuJEznRREWlQrlQ=="
     },
     "node_modules/@emotion/weak-memoize": {
-      "version": "0.3.1",
-      "resolved": "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.3.1.tgz",
-      "integrity": "sha512-EsBwpc7hBUJWAsNPBmJy4hxWx12v6bshQsldrVmjxJoc3isbxhOrF2IcCpaXxfvq03NwkI7sbsOLXbYuqF/8Ww=="
+      "version": "0.4.0",
+      "resolved": "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.4.0.tgz",
+      "integrity": "sha512-snKqtPW01tN0ui7yu9rGv69aJXr/a/Ywvl11sUjNtEcRc+ng/mQriFL0wLXMef74iHa/EkftbDzU9F8iFbH+zg=="
     },
     "node_modules/@eslint-community/eslint-utils": {
       "version": "4.4.0",
@@ -894,20 +894,20 @@
       }
     },
     "node_modules/@floating-ui/core": {
-      "version": "1.6.4",
-      "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.4.tgz",
-      "integrity": "sha512-a4IowK4QkXl4SCWTGUR0INAfEOX3wtsYw3rKK5InQEHMGObkR8Xk44qYQD9P4r6HHw0iIfK6GUKECmY8sTkqRA==",
+      "version": "1.6.5",
+      "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.5.tgz",
+      "integrity": "sha512-8GrTWmoFhm5BsMZOTHeGD2/0FLKLQQHvO/ZmQga4tKempYRLz8aqJGqXVuQgisnMObq2YZ2SgkwctN1LOOxcqA==",
       "dependencies": {
-        "@floating-ui/utils": "^0.2.4"
+        "@floating-ui/utils": "^0.2.5"
       }
     },
     "node_modules/@floating-ui/dom": {
-      "version": "1.6.7",
-      "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.7.tgz",
-      "integrity": "sha512-wmVfPG5o2xnKDU4jx/m4w5qva9FWHcnZ8BvzEe90D/RpwsJaTAVYPEPdQ8sbr/N8zZTAHlZUTQdqg8ZUbzHmng==",
+      "version": "1.6.8",
+      "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.8.tgz",
+      "integrity": "sha512-kx62rP19VZ767Q653wsP1XZCGIirkE09E0QUGNYTM/ttbbQHqcGPdSfWFxUyyNLc/W6aoJRBajOSXhP6GXjC0Q==",
       "dependencies": {
         "@floating-ui/core": "^1.6.0",
-        "@floating-ui/utils": "^0.2.4"
+        "@floating-ui/utils": "^0.2.5"
       }
     },
     "node_modules/@floating-ui/react-dom": {
@@ -923,9 +923,9 @@
       }
     },
     "node_modules/@floating-ui/utils": {
-      "version": "0.2.4",
-      "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.4.tgz",
-      "integrity": "sha512-dWO2pw8hhi+WrXq1YJy2yCuWoL20PddgGaqTgVe4cOS9Q6qklXCiA1tJEqX6BEwRNSCP84/afac9hd4MS+zEUA=="
+      "version": "0.2.5",
+      "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.5.tgz",
+      "integrity": "sha512-sTcG+QZ6fdEUObICavU+aB3Mp8HY4n14wYHdxK4fXjPmv3PXZZeY5RaguJmGyeH/CJQhX3fqKUtS4qc1LoHwhQ=="
     },
     "node_modules/@humanwhocodes/config-array": {
       "version": "0.11.14",
@@ -2162,14 +2162,14 @@
       }
     },
     "node_modules/@mui/x-date-pickers": {
-      "version": "7.10.0",
-      "resolved": "https://registry.npmjs.org/@mui/x-date-pickers/-/x-date-pickers-7.10.0.tgz",
-      "integrity": "sha512-mfJuKOdrrdlH5FskXl0aypRmZuVctNRwn5Xw0aMgE3n1ORCpzDSGCXd5El1/PdH3/3olT+vPFmxXKMQju5UMow==",
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@mui/x-date-pickers/-/x-date-pickers-7.11.0.tgz",
+      "integrity": "sha512-+zPWs1dwe7J1nZ2iFhTgCae31BLMYMQ2VtQfHxx21Dh6gbBRy/U7YJZg1LdhfQyE093S3e4A5uMZ6PUWdne7iA==",
       "dependencies": {
-        "@babel/runtime": "^7.24.7",
+        "@babel/runtime": "^7.24.8",
         "@mui/base": "^5.0.0-beta.40",
-        "@mui/system": "^5.16.0",
-        "@mui/utils": "^5.16.0",
+        "@mui/system": "^5.16.2",
+        "@mui/utils": "^5.16.2",
         "@types/react-transition-group": "^4.4.10",
         "clsx": "^2.1.1",
         "prop-types": "^15.8.1",
@@ -2226,15 +2226,35 @@
         }
       }
     },
+    "node_modules/@mui/x-internals": {
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@mui/x-internals/-/x-internals-7.11.0.tgz",
+      "integrity": "sha512-GqCYylKiB4cLH9tK4JweJlT2JvPjnpXjS3TEIqtHB4BcSsezhdRrMGzHOO5zCJqkasqTirJh2t6X16Qw1llr4Q==",
+      "dependencies": {
+        "@babel/runtime": "^7.24.8",
+        "@mui/utils": "^5.16.2"
+      },
+      "engines": {
+        "node": ">=14.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/mui-org"
+      },
+      "peerDependencies": {
+        "react": "^17.0.0 || ^18.0.0"
+      }
+    },
     "node_modules/@mui/x-tree-view": {
-      "version": "7.10.0",
-      "resolved": "https://registry.npmjs.org/@mui/x-tree-view/-/x-tree-view-7.10.0.tgz",
-      "integrity": "sha512-9OCAIb0wS5uuEDyjcSwSturrB4RUXBfE0UO/xpKjrMvRzCaAvxbCf2aFILP8uH9NyynYZkIGYfGnlqdAPy2OLg==",
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@mui/x-tree-view/-/x-tree-view-7.11.0.tgz",
+      "integrity": "sha512-/nk3hhTW5c4Uk2MIcIujC6w5/e5m8RbfWY0YTfRdHApmcFjeEZDX7O5pky5DojhaALopDuNebr9PlE8QYloaiw==",
       "dependencies": {
-        "@babel/runtime": "^7.24.7",
+        "@babel/runtime": "^7.24.8",
         "@mui/base": "^5.0.0-beta.40",
-        "@mui/system": "^5.16.0",
-        "@mui/utils": "^5.16.0",
+        "@mui/system": "^5.16.2",
+        "@mui/utils": "^5.16.2",
+        "@mui/x-internals": "7.11.0",
         "@types/react-transition-group": "^4.4.10",
         "clsx": "^2.1.1",
         "prop-types": "^15.8.1",
@@ -2383,9 +2403,9 @@
       }
     },
     "node_modules/@shikijs/core": {
-      "version": "1.10.3",
-      "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-1.10.3.tgz",
-      "integrity": "sha512-D45PMaBaeDHxww+EkcDQtDAtzv00Gcsp72ukBtaLSmqRvh0WgGMq3Al0rl1QQBZfuneO75NXMIzEZGFitThWbg==",
+      "version": "1.11.1",
+      "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-1.11.1.tgz",
+      "integrity": "sha512-Qsn8h15SWgv5TDRoDmiHNzdQO2BxDe86Yq6vIHf5T0cCvmfmccJKIzHtep8bQO9HMBZYCtCBzaXdd1MnxZBPSg==",
       "dev": true,
       "dependencies": {
         "@types/hast": "^3.0.4"
@@ -2433,9 +2453,9 @@
       "integrity": "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA=="
     },
     "node_modules/@testing-library/dom": {
-      "version": "10.3.2",
-      "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.3.2.tgz",
-      "integrity": "sha512-0bxIdP9mmPiOJ6wHLj8bdJRq+51oddObeCGdEf6PNEhYd93ZYAN+lPRnEOVFtheVwDM7+p+tza3LAQgp0PTudg==",
+      "version": "10.4.0",
+      "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.4.0.tgz",
+      "integrity": "sha512-pemlzrSESWbdAloYml3bAJMEfNh1Z7EduzqPKprCH5S341frlpYnUEW0H72dLxa6IsYr+mPno20GiSm+h9dEdQ==",
       "dev": true,
       "peer": true,
       "dependencies": {
@@ -2529,9 +2549,9 @@
       }
     },
     "node_modules/@testing-library/jest-dom": {
-      "version": "6.4.6",
-      "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.4.6.tgz",
-      "integrity": "sha512-8qpnGVincVDLEcQXWaHOf6zmlbwTKc6Us6PPu4CRnPXCzo2OGBS5cwgMMOWdxDpEz1mkbvXHpEy99M5Yvt682w==",
+      "version": "6.4.8",
+      "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.4.8.tgz",
+      "integrity": "sha512-JD0G+Zc38f5MBHA4NgxQMR5XtO5Jx9g86jqturNTt2WUfRmLDIY7iKkWHDCCTiDuFMre6nxAD5wHw9W5kI4rGw==",
       "dev": true,
       "dependencies": {
         "@adobe/css-tools": "^4.4.0",
@@ -2547,30 +2567,6 @@
         "node": ">=14",
         "npm": ">=6",
         "yarn": ">=1"
-      },
-      "peerDependencies": {
-        "@jest/globals": ">= 28",
-        "@types/bun": "latest",
-        "@types/jest": ">= 28",
-        "jest": ">= 28",
-        "vitest": ">= 0.32"
-      },
-      "peerDependenciesMeta": {
-        "@jest/globals": {
-          "optional": true
-        },
-        "@types/bun": {
-          "optional": true
-        },
-        "@types/jest": {
-          "optional": true
-        },
-        "jest": {
-          "optional": true
-        },
-        "vitest": {
-          "optional": true
-        }
       }
     },
     "node_modules/@testing-library/jest-dom/node_modules/ansi-styles": {
@@ -2823,9 +2819,9 @@
       }
     },
     "node_modules/@types/eslint": {
-      "version": "8.56.10",
-      "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.10.tgz",
-      "integrity": "sha512-Shavhk87gCtY2fhXDctcfS3e6FdxWkCx1iUZ9eEUbh7rTqlZT0/IzOkCOVt0fCjcFuZ9FPYfuezTBImfHCDBGQ==",
+      "version": "8.56.11",
+      "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.11.tgz",
+      "integrity": "sha512-sVBpJMf7UPo/wGecYOpk2aQya2VUGeHhe38WG7/mN5FufNSubf5VT9Uh9Uyp8/eLJpu1/tuhJ/qTo4mhSB4V4Q==",
       "dev": true,
       "dependencies": {
         "@types/estree": "*",
@@ -3001,9 +2997,9 @@
       "integrity": "sha512-SuT16Q1K51EAVPz1K29DJ/sXjhSQ0zjvsypYJ6tlwVsRV9jwW5Adq2ch8Dq8kDBCkYnELS7N7VNCSB5nC56t/g=="
     },
     "node_modules/@types/plotly.js": {
-      "version": "2.33.0",
-      "resolved": "https://registry.npmjs.org/@types/plotly.js/-/plotly.js-2.33.0.tgz",
-      "integrity": "sha512-oEyRCLLShp7lX4lRodFsigEv9z9OyL+UGWTKNdmpEghA3XR1CK+MjUPiIMf79625bvUvydOQ7kRx8cjCeG4Wvg==",
+      "version": "2.33.1",
+      "resolved": "https://registry.npmjs.org/@types/plotly.js/-/plotly.js-2.33.1.tgz",
+      "integrity": "sha512-IeU1cO8MyN/PdQrbiyCbUk63AdN1v9cQapg4D3YeTXoMLdDcmv8qq+Eqb+oPamyd+wWsiKHzAD/Q5lSlSwCCaw==",
       "dev": true
     },
     "node_modules/@types/prop-types": {
@@ -3146,16 +3142,16 @@
       "dev": true
     },
     "node_modules/@typescript-eslint/eslint-plugin": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.16.1.tgz",
-      "integrity": "sha512-SxdPak/5bO0EnGktV05+Hq8oatjAYVY3Zh2bye9pGZy6+jwyR3LG3YKkV4YatlsgqXP28BTeVm9pqwJM96vf2A==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.17.0.tgz",
+      "integrity": "sha512-pyiDhEuLM3PuANxH7uNYan1AaFs5XE0zw1hq69JBvGvE7gSuEoQl1ydtEe/XQeoC3GQxLXyOVa5kNOATgM638A==",
       "dev": true,
       "dependencies": {
         "@eslint-community/regexpp": "^4.10.0",
-        "@typescript-eslint/scope-manager": "7.16.1",
-        "@typescript-eslint/type-utils": "7.16.1",
-        "@typescript-eslint/utils": "7.16.1",
-        "@typescript-eslint/visitor-keys": "7.16.1",
+        "@typescript-eslint/scope-manager": "7.17.0",
+        "@typescript-eslint/type-utils": "7.17.0",
+        "@typescript-eslint/utils": "7.17.0",
+        "@typescript-eslint/visitor-keys": "7.17.0",
         "graphemer": "^1.4.0",
         "ignore": "^5.3.1",
         "natural-compare": "^1.4.0",
@@ -3179,15 +3175,15 @@
       }
     },
     "node_modules/@typescript-eslint/parser": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.16.1.tgz",
-      "integrity": "sha512-u+1Qx86jfGQ5i4JjK33/FnawZRpsLxRnKzGE6EABZ40KxVT/vWsiZFEBBHjFOljmmV3MBYOHEKi0Jm9hbAOClA==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.17.0.tgz",
+      "integrity": "sha512-puiYfGeg5Ydop8eusb/Hy1k7QmOU6X3nvsqCgzrB2K4qMavK//21+PzNE8qeECgNOIoertJPUC1SpegHDI515A==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/scope-manager": "7.16.1",
-        "@typescript-eslint/types": "7.16.1",
-        "@typescript-eslint/typescript-estree": "7.16.1",
-        "@typescript-eslint/visitor-keys": "7.16.1",
+        "@typescript-eslint/scope-manager": "7.17.0",
+        "@typescript-eslint/types": "7.17.0",
+        "@typescript-eslint/typescript-estree": "7.17.0",
+        "@typescript-eslint/visitor-keys": "7.17.0",
         "debug": "^4.3.4"
       },
       "engines": {
@@ -3207,13 +3203,13 @@
       }
     },
     "node_modules/@typescript-eslint/scope-manager": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.16.1.tgz",
-      "integrity": "sha512-nYpyv6ALte18gbMz323RM+vpFpTjfNdyakbf3nsLvF43uF9KeNC289SUEW3QLZ1xPtyINJ1dIsZOuWuSRIWygw==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.17.0.tgz",
+      "integrity": "sha512-0P2jTTqyxWp9HiKLu/Vemr2Rg1Xb5B7uHItdVZ6iAenXmPo4SZ86yOPCJwMqpCyaMiEHTNqizHfsbmCFT1x9SA==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.16.1",
-        "@typescript-eslint/visitor-keys": "7.16.1"
+        "@typescript-eslint/types": "7.17.0",
+        "@typescript-eslint/visitor-keys": "7.17.0"
       },
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -3224,13 +3220,13 @@
       }
     },
     "node_modules/@typescript-eslint/type-utils": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.16.1.tgz",
-      "integrity": "sha512-rbu/H2MWXN4SkjIIyWcmYBjlp55VT+1G3duFOIukTNFxr9PI35pLc2ydwAfejCEitCv4uztA07q0QWanOHC7dA==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.17.0.tgz",
+      "integrity": "sha512-XD3aaBt+orgkM/7Cei0XNEm1vwUxQ958AOLALzPlbPqb8C1G8PZK85tND7Jpe69Wualri81PLU+Zc48GVKIMMA==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/typescript-estree": "7.16.1",
-        "@typescript-eslint/utils": "7.16.1",
+        "@typescript-eslint/typescript-estree": "7.17.0",
+        "@typescript-eslint/utils": "7.17.0",
         "debug": "^4.3.4",
         "ts-api-utils": "^1.3.0"
       },
@@ -3251,9 +3247,9 @@
       }
     },
     "node_modules/@typescript-eslint/types": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.16.1.tgz",
-      "integrity": "sha512-AQn9XqCzUXd4bAVEsAXM/Izk11Wx2u4H3BAfQVhSfzfDOm/wAON9nP7J5rpkCxts7E5TELmN845xTUCQrD1xIQ==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.17.0.tgz",
+      "integrity": "sha512-a29Ir0EbyKTKHnZWbNsrc/gqfIBqYPwj3F2M+jWE/9bqfEHg0AMtXzkbUkOG6QgEScxh2+Pz9OXe11jHDnHR7A==",
       "dev": true,
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -3264,13 +3260,13 @@
       }
     },
     "node_modules/@typescript-eslint/typescript-estree": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.16.1.tgz",
-      "integrity": "sha512-0vFPk8tMjj6apaAZ1HlwM8w7jbghC8jc1aRNJG5vN8Ym5miyhTQGMqU++kuBFDNKe9NcPeZ6x0zfSzV8xC1UlQ==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.17.0.tgz",
+      "integrity": "sha512-72I3TGq93t2GoSBWI093wmKo0n6/b7O4j9o8U+f65TVD0FS6bI2180X5eGEr8MA8PhKMvYe9myZJquUT2JkCZw==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.16.1",
-        "@typescript-eslint/visitor-keys": "7.16.1",
+        "@typescript-eslint/types": "7.17.0",
+        "@typescript-eslint/visitor-keys": "7.17.0",
         "debug": "^4.3.4",
         "globby": "^11.1.0",
         "is-glob": "^4.0.3",
@@ -3292,15 +3288,15 @@
       }
     },
     "node_modules/@typescript-eslint/utils": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.16.1.tgz",
-      "integrity": "sha512-WrFM8nzCowV0he0RlkotGDujx78xudsxnGMBHI88l5J8wEhED6yBwaSLP99ygfrzAjsQvcYQ94quDwI0d7E1fA==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.17.0.tgz",
+      "integrity": "sha512-r+JFlm5NdB+JXc7aWWZ3fKSm1gn0pkswEwIYsrGPdsT2GjsRATAKXiNtp3vgAAO1xZhX8alIOEQnNMl3kbTgJw==",
       "dev": true,
       "dependencies": {
         "@eslint-community/eslint-utils": "^4.4.0",
-        "@typescript-eslint/scope-manager": "7.16.1",
-        "@typescript-eslint/types": "7.16.1",
-        "@typescript-eslint/typescript-estree": "7.16.1"
+        "@typescript-eslint/scope-manager": "7.17.0",
+        "@typescript-eslint/types": "7.17.0",
+        "@typescript-eslint/typescript-estree": "7.17.0"
       },
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -3314,12 +3310,12 @@
       }
     },
     "node_modules/@typescript-eslint/visitor-keys": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.16.1.tgz",
-      "integrity": "sha512-Qlzzx4sE4u3FsHTPQAAQFJFNOuqtuY0LFrZHwQ8IHK705XxBiWOFkfKRWu6niB7hwfgnwIpO4jTC75ozW1PHWg==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.17.0.tgz",
+      "integrity": "sha512-RVGC9UhPOCsfCdI9pU++K4nD7to+jTcMIbXTSOcrLqUEW6gF2pU1UUbYJKc9cvcRSK1UDeMJ7pdMxf4bhMpV/A==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.16.1",
+        "@typescript-eslint/types": "7.17.0",
         "eslint-visitor-keys": "^3.4.3"
       },
       "engines": {
@@ -3921,18 +3917,6 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
-    "node_modules/array.prototype.toreversed": {
-      "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/array.prototype.toreversed/-/array.prototype.toreversed-1.1.2.tgz",
-      "integrity": "sha512-wwDCoT4Ck4Cz7sLtgUmzR5UV3YF5mFHUlbChCzZBQZ+0m2cl/DH3tKgvphv1nKgFsJ48oCSg6p91q2Vm0I/ZMA==",
-      "dev": true,
-      "dependencies": {
-        "call-bind": "^1.0.2",
-        "define-properties": "^1.2.0",
-        "es-abstract": "^1.22.1",
-        "es-shim-unscopables": "^1.0.0"
-      }
-    },
     "node_modules/array.prototype.tosorted": {
       "version": "1.1.4",
       "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.4.tgz",
@@ -4422,9 +4406,9 @@
       }
     },
     "node_modules/caniuse-lite": {
-      "version": "1.0.30001642",
-      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001642.tgz",
-      "integrity": "sha512-3XQ0DoRgLijXJErLSl+bLnJ+Et4KqV1PY6JJBGAFlsNsz31zeAIncyeZfLCabHK/jtSh+671RM9YMldxjUPZtA==",
+      "version": "1.0.30001643",
+      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001643.tgz",
+      "integrity": "sha512-ERgWGNleEilSrHM6iUz/zJNSQTP8Mr21wDWpdgvRwcTXGAq6jMtOUPP4dqFPTdKqZ2wKTdtB+uucZ3MRpAUSmg==",
       "funding": [
         {
           "type": "opencollective",
@@ -6009,9 +5993,9 @@
       }
     },
     "node_modules/electron-to-chromium": {
-      "version": "1.4.828",
-      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.828.tgz",
-      "integrity": "sha512-QOIJiWpQJDHAVO4P58pwb133Cwee0nbvy/MV1CwzZVGpkH1RX33N3vsaWRCpR6bF63AAq366neZrRTu7Qlsbbw=="
+      "version": "1.5.0",
+      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.0.tgz",
+      "integrity": "sha512-Vb3xHHYnLseK8vlMJQKJYXJ++t4u1/qJ3vykuVrVjvdiOEhYyT1AuP4x03G8EnPmYvYOhe9T+dADTmthjRQMkA=="
     },
     "node_modules/element-size": {
       "version": "1.1.1",
@@ -6453,15 +6437,14 @@
       }
     },
     "node_modules/eslint-plugin-react": {
-      "version": "7.34.4",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.34.4.tgz",
-      "integrity": "sha512-Np+jo9bUwJNxCsT12pXtrGhJgT3T44T1sHhn1Ssr42XFn8TES0267wPGo5nNrMHi8qkyimDAX2BUmkf9pSaVzA==",
+      "version": "7.35.0",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.35.0.tgz",
+      "integrity": "sha512-v501SSMOWv8gerHkk+IIQBkcGRGrO2nfybfj5pLxuJNFTPxxA3PSryhXTK+9pNbtkggheDdsC0E9Q8CuPk6JKA==",
       "dev": true,
       "dependencies": {
         "array-includes": "^3.1.8",
         "array.prototype.findlast": "^1.2.5",
         "array.prototype.flatmap": "^1.3.2",
-        "array.prototype.toreversed": "^1.1.2",
         "array.prototype.tosorted": "^1.1.4",
         "doctrine": "^2.1.0",
         "es-iterator-helpers": "^1.0.19",
@@ -6482,7 +6465,7 @@
         "node": ">=4"
       },
       "peerDependencies": {
-        "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8"
+        "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7"
       }
     },
     "node_modules/eslint-plugin-react-hooks": {
@@ -8054,12 +8037,12 @@
       }
     },
     "node_modules/husky": {
-      "version": "9.0.11",
-      "resolved": "https://registry.npmjs.org/husky/-/husky-9.0.11.tgz",
-      "integrity": "sha512-AB6lFlbwwyIqMdHYhwPe+kjOC3Oc5P3nThEoW/AaO2BX3vJDjWPFxYLxokUZOo6RNX20He3AaT8sESs9NJcmEw==",
+      "version": "9.1.1",
+      "resolved": "https://registry.npmjs.org/husky/-/husky-9.1.1.tgz",
+      "integrity": "sha512-fCqlqLXcBnXa/TJXmT93/A36tJsjdJkibQ1MuIiFyCCYUlpYpIaj2mv1w+3KR6Rzu1IC3slFTje5f6DUp2A2rg==",
       "dev": true,
       "bin": {
-        "husky": "bin.mjs"
+        "husky": "bin.js"
       },
       "engines": {
         "node": ">=18"
@@ -8135,9 +8118,9 @@
       }
     },
     "node_modules/import-local": {
-      "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz",
-      "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==",
+      "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz",
+      "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==",
       "dev": true,
       "dependencies": {
         "pkg-dir": "^4.2.0",
@@ -8327,9 +8310,9 @@
       }
     },
     "node_modules/is-core-module": {
-      "version": "2.14.0",
-      "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.14.0.tgz",
-      "integrity": "sha512-a5dFJih5ZLYlRtDc0dZWP7RiKr6xIKzmn/oAYCDvdLThadVgyJwlaoQPmRtMSpz+rk0OGAgIu+TcM9HUF0fk1A==",
+      "version": "2.15.0",
+      "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.0.tgz",
+      "integrity": "sha512-Dd+Lb2/zvk9SKy1TGCt1wFJFo/MWBPMX5x7KcvLajWTGuomczdQX61PvY5yK6SVACwpoexWo81IfFyoKY2QnTA==",
       "dependencies": {
         "hasown": "^2.0.2"
       },
@@ -8860,9 +8843,9 @@
       }
     },
     "node_modules/jake": {
-      "version": "10.9.1",
-      "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.1.tgz",
-      "integrity": "sha512-61btcOHNnLnsOdtLgA5efqQWjnSi/vow5HbI7HMdKKWqvrKR1bLK3BPlJn9gcSaP2ewuamUSMB5XEy76KUIS2w==",
+      "version": "10.9.2",
+      "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz",
+      "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==",
       "dev": true,
       "dependencies": {
         "async": "^3.2.3",
@@ -12332,9 +12315,9 @@
       "dev": true
     },
     "node_modules/node-releases": {
-      "version": "2.0.14",
-      "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz",
-      "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw=="
+      "version": "2.0.18",
+      "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz",
+      "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g=="
     },
     "node_modules/normalize-path": {
       "version": "3.0.0",
@@ -12887,9 +12870,9 @@
       }
     },
     "node_modules/plotly.js": {
-      "version": "2.33.0",
-      "resolved": "https://registry.npmjs.org/plotly.js/-/plotly.js-2.33.0.tgz",
-      "integrity": "sha512-pzuf6hSUCaSYmEag2b2DngkHdYMn+U/QMSC/UJOLIS8yd2UwIG1iGUmOR7pqZIS87oKx/+cMoG8aknGytgJKig==",
+      "version": "2.34.0",
+      "resolved": "https://registry.npmjs.org/plotly.js/-/plotly.js-2.34.0.tgz",
+      "integrity": "sha512-dG2LC6wY6AUR1jsnriBi9xbigLPEEXXOHhLo97dRiZAWZVS6lZCmXXZ227U4rsoluXyfyqQezaKq7svolap8Dw==",
       "dependencies": {
         "@plotly/d3": "3.8.2",
         "@plotly/d3-sankey": "0.7.2",
@@ -13422,9 +13405,9 @@
       }
     },
     "node_modules/react-router": {
-      "version": "6.25.0",
-      "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.25.0.tgz",
-      "integrity": "sha512-bziKjCcDbcxgWS9WlWFcQIVZ2vJHnCP6DGpQDT0l+0PFDasfJKgzf9CM22eTyhFsZkjk8ApCdKjJwKtzqH80jQ==",
+      "version": "6.25.1",
+      "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.25.1.tgz",
+      "integrity": "sha512-u8ELFr5Z6g02nUtpPAggP73Jigj1mRePSwhS/2nkTrlPU5yEkH1vYzWNyvSnSzeeE2DNqWdH+P8OhIh9wuXhTw==",
       "dependencies": {
         "@remix-run/router": "1.18.0"
       },
@@ -13436,12 +13419,12 @@
       }
     },
     "node_modules/react-router-dom": {
-      "version": "6.25.0",
-      "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.25.0.tgz",
-      "integrity": "sha512-BhcczgDWWgvGZxjDDGuGHrA8HrsSudilqTaRSBYLWDayvo1ClchNIDVt5rldqp6e7Dro5dEFx9Mzc+r292lN0w==",
+      "version": "6.25.1",
+      "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.25.1.tgz",
+      "integrity": "sha512-0tUDpbFvk35iv+N89dWNrJp+afLgd+y4VtorJZuOCXK0kkCWjEvb3vTJM++SYvMEpbVwXKf3FjeVveVEb6JpDQ==",
       "dependencies": {
         "@remix-run/router": "1.18.0",
-        "react-router": "6.25.0"
+        "react-router": "6.25.1"
       },
       "engines": {
         "node": ">=14.0.0"
@@ -14037,9 +14020,9 @@
       "dev": true
     },
     "node_modules/semver": {
-      "version": "7.6.2",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz",
-      "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==",
+      "version": "7.6.3",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
+      "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==",
       "dev": true,
       "bin": {
         "semver": "bin/semver.js"
@@ -14133,12 +14116,12 @@
       }
     },
     "node_modules/shiki": {
-      "version": "1.10.3",
-      "resolved": "https://registry.npmjs.org/shiki/-/shiki-1.10.3.tgz",
-      "integrity": "sha512-eneCLncGuvPdTutJuLyUGS8QNPAVFO5Trvld2wgEq1e002mwctAhJKeMGWtWVXOIEzmlcLRqcgPSorR6AVzOmQ==",
+      "version": "1.11.1",
+      "resolved": "https://registry.npmjs.org/shiki/-/shiki-1.11.1.tgz",
+      "integrity": "sha512-VHD3Q0EBXaaa245jqayBe5zQyMQUdXBFjmGr9MpDaDpAKRMYn7Ff00DM5MLk26UyKjnml3yQ0O2HNX7PtYVNFQ==",
       "dev": true,
       "dependencies": {
-        "@shikijs/core": "1.10.3",
+        "@shikijs/core": "1.11.1",
         "@types/hast": "^3.0.4"
       }
     },
@@ -15049,13 +15032,13 @@
       }
     },
     "node_modules/ts-jest": {
-      "version": "29.2.2",
-      "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.2.tgz",
-      "integrity": "sha512-sSW7OooaKT34AAngP6k1VS669a0HdLxkQZnlC7T76sckGCokXFnvJ3yRlQZGRTAoV5K19HfSgCiSwWOSIfcYlg==",
+      "version": "29.2.3",
+      "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.3.tgz",
+      "integrity": "sha512-yCcfVdiBFngVz9/keHin9EnsrQtQtEu3nRykNy9RVp+FiPFFbPJ3Sg6Qg4+TkmH0vMP5qsTKgXSsk80HRwvdgQ==",
       "dev": true,
       "dependencies": {
         "bs-logger": "0.x",
-        "ejs": "^3.0.0",
+        "ejs": "^3.1.10",
         "fast-json-stable-stringify": "2.x",
         "jest-util": "^29.0.0",
         "json5": "^2.2.3",
@@ -15326,9 +15309,9 @@
       }
     },
     "node_modules/typedoc": {
-      "version": "0.26.4",
-      "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.26.4.tgz",
-      "integrity": "sha512-FlW6HpvULDKgc3rK04V+nbFyXogPV88hurarDPOjuuB5HAwuAlrCMQ5NeH7Zt68a/ikOKu6Z/0hFXAeC9xPccQ==",
+      "version": "0.26.5",
+      "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.26.5.tgz",
+      "integrity": "sha512-Vn9YKdjKtDZqSk+by7beZ+xzkkr8T8CYoiasqyt4TTRFy5+UHzL/mF/o4wGBjRF+rlWQHDb0t6xCpA3JNL5phg==",
       "dev": true,
       "dependencies": {
         "lunr": "^2.3.9",
@@ -15348,9 +15331,9 @@
       }
     },
     "node_modules/typedoc-plugin-markdown": {
-      "version": "4.2.1",
-      "resolved": "https://registry.npmjs.org/typedoc-plugin-markdown/-/typedoc-plugin-markdown-4.2.1.tgz",
-      "integrity": "sha512-7hQt/1WaW/VI4+x3sxwcCGsEylP1E1GvF6OTTELK5sfTEp6AeK+83jkCOgZGp1pI2DiOammMYQMnxxOny9TKsQ==",
+      "version": "4.2.2",
+      "resolved": "https://registry.npmjs.org/typedoc-plugin-markdown/-/typedoc-plugin-markdown-4.2.2.tgz",
+      "integrity": "sha512-4Amnhjiw4L9aN5yBn6Ryh5WZr+uW41e6IU3EuQCNcVWgHQC+tlNIbbQMKVYAb33ES7yaM01dAXGS4BdJtQi7mA==",
       "dev": true,
       "engines": {
         "node": ">= 18"
@@ -15372,9 +15355,9 @@
       }
     },
     "node_modules/typescript": {
-      "version": "5.5.3",
-      "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz",
-      "integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==",
+      "version": "5.5.4",
+      "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz",
+      "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==",
       "dev": true,
       "bin": {
         "tsc": "bin/tsc",

+ 1 - 0
frontend/taipy-gui/packaging/taipy-gui.d.ts

@@ -121,6 +121,7 @@ export interface FilterDesc {
     col: string;
     action: string;
     value: string | number | boolean | Date;
+    type: string;
 }
 export interface TableFilterProps {
     columns: Record<string, ColumnDesc>;

+ 16 - 11
frontend/taipy-gui/src/components/Taipy/Chart.tsx

@@ -160,11 +160,11 @@ const getDecimatorsPayload = (
                             xAxis: getAxis(traces, i, columns, 0),
                             yAxis: getAxis(traces, i, columns, 1),
                             zAxis: getAxis(traces, i, columns, 2),
-                            chartMode: modes[i]
+                            chartMode: modes[i],
                         }
                       : undefined
               ),
-              relayoutData: relayoutData
+              relayoutData: relayoutData,
           }
         : undefined;
 };
@@ -225,14 +225,19 @@ export const TaipyPlotlyButtons: ModeBarButtonAny[] = [
             if (!div) {
                 return;
             }
-            const {height} = gd.dataset;
+            const { height, width } = gd.dataset;
             if (!height) {
-                gd.setAttribute("data-height", getComputedStyle(div).height);
+                const st = getComputedStyle(div);
+                gd.setAttribute("data-height", st.height);
+                gd.setAttribute("data-width", st.width);
             }
             const fs = gd.classList.toggle("full-screen");
             (evt.currentTarget as HTMLElement).setAttribute("data-title", fs ? "Exit Full screen" : "Full screen");
-            if (height && !fs) {
-                div.attributeStyleMap.set("height", height);
+            if (!fs) {
+                // height && div.attributeStyleMap.set("height", height);
+                height && (div.style.height = height);
+                // width && div.attributeStyleMap.set("width", width);
+                width && (div.style.width = width);
             }
             window.dispatchEvent(new Event("resize"));
         }
@@ -425,9 +430,9 @@ const Chart = (props: ChartProp) => {
                       mode: config.modes[idx],
                       name:
                           getArrayValue(config.names, idx) ||
-                          (config.columns[trace[1]] ? getColNameFromIndexed(config.columns[trace[1]].dfid) : undefined)
+                          (config.columns[trace[1]] ? getColNameFromIndexed(config.columns[trace[1]].dfid) : undefined),
                   } as Record<string, unknown>;
-                  ret.marker = {...getArrayValue(config.markers, idx, ret.marker || {})};
+                  ret.marker = { ...getArrayValue(config.markers, idx, ret.marker || {}) };
                   if (Object.keys(ret.marker as object).length) {
                       MARKER_TO_COL.forEach((prop) => {
                           const val = (ret.marker as Record<string, unknown>)[prop];
@@ -492,7 +497,7 @@ const Chart = (props: ChartProp) => {
                   ret.textposition = getArrayValue(config.textAnchors, idx);
                   const selectedMarker = getArrayValue(config.selectedMarkers, idx);
                   if (selectedMarker) {
-                      ret.selected = {marker: selectedMarker};
+                      ret.selected = { marker: selectedMarker };
                   }
                   return ret as Data;
               })
@@ -579,8 +584,8 @@ const Chart = (props: ChartProp) => {
                 ? props.figure
                     ? index
                     : data[dataKey].tp_index
-                      ? (data[dataKey].tp_index[index] as number)
-                      : index
+                    ? (data[dataKey].tp_index[index] as number)
+                    : index
                 : 0,
         [data, dataKey, props.figure]
     );

+ 40 - 14
frontend/taipy-gui/src/components/Taipy/Dialog.spec.tsx

@@ -12,7 +12,7 @@
  */
 
 import React from "react";
-import { render } from "@testing-library/react";
+import { fireEvent, render } from "@testing-library/react";
 import "@testing-library/jest-dom";
 import userEvent from "@testing-library/user-event";
 
@@ -44,7 +44,7 @@ describe("Dialog Component", () => {
         const { getByText } = render(
             <HelmetProvider>
                 <Dialog title="Dialog-Test-Title" page="page" open={true} />
-            </HelmetProvider>
+            </HelmetProvider>,
         );
         const elt = getByText("Dialog-Test-Title");
         expect(elt.tagName).toBe("H2");
@@ -54,7 +54,7 @@ describe("Dialog Component", () => {
         const { queryAllByText } = render(
             <HelmetProvider>
                 <Dialog title="Dialog-Test-Title" page="page" open={false} />
-            </HelmetProvider>
+            </HelmetProvider>,
         );
         expect(queryAllByText("Dialog-Test-Title")).toHaveLength(0);
         const divs = document.getElementsByTagName("div");
@@ -65,7 +65,7 @@ describe("Dialog Component", () => {
         const wrapper = render(
             <HelmetProvider>
                 <Dialog title="Dialog-Test-Title" page="page" open={true} className="taipy-dialog" />
-            </HelmetProvider>
+            </HelmetProvider>,
         );
         const elt = document.querySelector(".MuiDialog-root");
         expect(elt).toHaveClass("taipy-dialog");
@@ -79,7 +79,7 @@ describe("Dialog Component", () => {
                     defaultOpen="true"
                     open={undefined as unknown as boolean}
                 />
-            </HelmetProvider>
+            </HelmetProvider>,
         );
         getByText("Dialog-Test-Title");
     });
@@ -92,7 +92,7 @@ describe("Dialog Component", () => {
                     defaultOpen="true"
                     open={undefined as unknown as boolean}
                 />
-            </HelmetProvider>
+            </HelmetProvider>,
         );
         expect(getAllByRole("button")).toHaveLength(1);
     });
@@ -106,7 +106,7 @@ describe("Dialog Component", () => {
                     open={undefined as unknown as boolean}
                     labels={JSON.stringify(["toto"])}
                 />
-            </HelmetProvider>
+            </HelmetProvider>,
         );
         expect(getAllByRole("button")).toHaveLength(2);
     });
@@ -120,7 +120,7 @@ describe("Dialog Component", () => {
                     open={undefined as unknown as boolean}
                     labels={JSON.stringify(["toto", "titi", "toto"])}
                 />
-            </HelmetProvider>
+            </HelmetProvider>,
         );
         expect(getAllByRole("button")).toHaveLength(4);
     });
@@ -134,7 +134,7 @@ describe("Dialog Component", () => {
                     active={false}
                     labels={JSON.stringify(["testValidate", "testCancel"])}
                 />
-            </HelmetProvider>
+            </HelmetProvider>,
         );
         expect(getByText("testValidate")).toBeDisabled();
         expect(getByText("testCancel")).toBeDisabled();
@@ -148,7 +148,7 @@ describe("Dialog Component", () => {
                     open={true}
                     labels={JSON.stringify(["testValidate", "testCancel"])}
                 />
-            </HelmetProvider>
+            </HelmetProvider>,
         );
         expect(getByText("testValidate")).not.toBeDisabled();
         expect(getByText("testCancel")).not.toBeDisabled();
@@ -163,7 +163,7 @@ describe("Dialog Component", () => {
                     active={true}
                     labels={JSON.stringify(["testValidate", "testCancel"])}
                 />
-            </HelmetProvider>
+            </HelmetProvider>,
         );
         expect(getByText("testValidate")).not.toBeDisabled();
         expect(getByText("testCancel")).not.toBeDisabled();
@@ -183,7 +183,7 @@ describe("Dialog Component", () => {
                         onAction="testValidateAction"
                     />
                 </HelmetProvider>
-            </TaipyContext.Provider>
+            </TaipyContext.Provider>,
         );
         await userEvent.click(getByTitle("Close"));
         expect(dispatch).toHaveBeenLastCalledWith({
@@ -208,7 +208,7 @@ describe("Dialog Component", () => {
                         onAction="testValidateAction"
                     />
                 </HelmetProvider>
-            </TaipyContext.Provider>
+            </TaipyContext.Provider>,
         );
         await userEvent.click(getByText("testValidate"));
         expect(dispatch).toHaveBeenLastCalledWith({
@@ -233,7 +233,7 @@ describe("Dialog Component", () => {
                         onAction="testValidateAction"
                     />
                 </HelmetProvider>
-            </TaipyContext.Provider>
+            </TaipyContext.Provider>,
         );
         await userEvent.click(getByText("Another One"));
         expect(dispatch).toHaveBeenLastCalledWith({
@@ -242,4 +242,30 @@ describe("Dialog Component", () => {
             type: "SEND_ACTION_ACTION",
         });
     });
+    it("should log an error when labels prop is not a valid JSON string", () => {
+        const consoleSpy = jest.spyOn(console, "info");
+        render(<Dialog title={"Dialog-Test-Title"} labels={"not a valid JSON string"} />);
+        expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining("Error parsing dialog.labels"));
+    });
+    it("should apply width and height styles when they are provided", async () => {
+        const { findByRole } = render(<Dialog title="Dialog-Test-Title" width="500px" height="300px" open={true} />);
+        const dialogElement = await findByRole("dialog");
+        expect(dialogElement).toHaveStyle({ width: "500px", height: "300px" });
+    });
+    it("should not apply width and height styles when they are not provided", async () => {
+        const { findByRole } = render(<Dialog title="Dialog-Test-Title" open={true} />);
+        const dialogElement = await findByRole("dialog");
+        const computedStyles = window.getComputedStyle(dialogElement);
+        expect(computedStyles.width).not.toBe("500px");
+        expect(computedStyles.height).not.toBe("300px");
+    });
+    it("calls localAction prop when handleAction is triggered", () => {
+        const localActionMock = jest.fn();
+        const { getByLabelText } = render(
+            <Dialog id="test-dialog" title="Test Dialog" localAction={localActionMock} open={true} />,
+        );
+        const closeButton = getByLabelText("close");
+        fireEvent.click(closeButton);
+        expect(localActionMock).toHaveBeenCalledWith(-1);
+    });
 });

+ 17 - 5
frontend/taipy-gui/src/components/Taipy/TableFilter.spec.tsx

@@ -12,7 +12,7 @@
  */
 
 import React from "react";
-import { getByTitle, render } from "@testing-library/react";
+import { render } from "@testing-library/react";
 import "@testing-library/jest-dom";
 import userEvent from "@testing-library/user-event";
 
@@ -140,7 +140,7 @@ describe("Table Filter Component", () => {
     });
     it("behaves on date column", async () => {
         const { getByTestId, getAllByTestId, findByRole, getByText, getByPlaceholderText } = render(
-                <TableFilter columns={tableColumns} colsOrder={colsOrder} onValidate={jest.fn()} filteredCount={0} />
+            <TableFilter columns={tableColumns} colsOrder={colsOrder} onValidate={jest.fn()} filteredCount={0} />
         );
         const elt = getByTestId("FilterListIcon");
         await userEvent.click(elt);
@@ -155,7 +155,7 @@ describe("Table Filter Component", () => {
         const validate = getByTestId("CheckIcon").parentElement;
         expect(validate).toBeDisabled();
         const input = getByPlaceholderText("YYYY/MM/DD");
-        await userEvent.type(input, "{ArrowLeft}{ArrowLeft}{ArrowLeft}2020/11/11", {delay: 1});
+        await userEvent.type(input, "{ArrowLeft}{ArrowLeft}{ArrowLeft}2020/11/11", { delay: 1 });
         expect(validate).not.toBeDisabled();
     });
     it("adds a row on validation", async () => {
@@ -212,7 +212,13 @@ describe("Table Filter Component", () => {
     it("reset filters", async () => {
         const onValidate = jest.fn();
         const { getAllByTestId, getByTestId } = render(
-            <TableFilter columns={tableColumns} colsOrder={colsOrder} onValidate={onValidate} appliedFilters={[{col: "StringCol", action: "==", value: ""}]} filteredCount={0} />
+            <TableFilter
+                columns={tableColumns}
+                colsOrder={colsOrder}
+                onValidate={onValidate}
+                appliedFilters={[{ col: "StringCol", action: "==", value: "", type: "" }]}
+                filteredCount={0}
+            />
         );
         const elt = getByTestId("FilterListIcon");
         await userEvent.click(elt);
@@ -225,7 +231,13 @@ describe("Table Filter Component", () => {
     });
     it("ignores unapplicable filters", async () => {
         const { getAllByTestId, getByTestId } = render(
-            <TableFilter columns={tableColumns} colsOrder={colsOrder} onValidate={jest.fn()} appliedFilters={[{col: "unknown col", action: "==", value: ""}]} filteredCount={0} />
+            <TableFilter
+                columns={tableColumns}
+                colsOrder={colsOrder}
+                onValidate={jest.fn()}
+                appliedFilters={[{ col: "unknown col", action: "==", value: "", type: "" }]}
+                filteredCount={0}
+            />
         );
         const elt = getByTestId("FilterListIcon");
         await userEvent.click(elt);

+ 2 - 0
frontend/taipy-gui/src/components/Taipy/TableFilter.tsx

@@ -38,6 +38,7 @@ export interface FilterDesc {
     col: string;
     action: string;
     value: string | number | boolean | Date;
+    type: string;
 }
 
 interface TableFilterProps {
@@ -118,6 +119,7 @@ const getFilterDesc = (columns: Record<string, ColumnDesc>, colId?: string, act?
                             ? getDateTime(val)
                             : val
                         : val,
+                type: colType,
             } as FilterDesc;
         } catch (e) {
             console.info("could not parse value ", val, e);

+ 22 - 21
frontend/taipy-gui/src/context/taipyReducers.spec.ts

@@ -64,7 +64,7 @@ const sendWsMessageSpy = jest.spyOn(wsUtils, "sendWsMessage");
 describe("reducer", () => {
     it("store socket connected", async () => {
         expect(
-            taipyReducer({ ...INITIAL_STATE }, { type: "SOCKET_CONNECTED" } as TaipyBaseAction).isSocketConnected,
+            taipyReducer({ ...INITIAL_STATE }, { type: "SOCKET_CONNECTED" } as TaipyBaseAction).isSocketConnected
         ).toBeDefined();
     });
     it("returns update", async () => {
@@ -73,7 +73,7 @@ describe("reducer", () => {
                 type: "UPDATE",
                 name: "name",
                 payload: { value: "value" },
-            } as TaipyBaseAction).data.name,
+            } as TaipyBaseAction).data.name
         ).toBeDefined();
     });
     it("store locations", async () => {
@@ -81,7 +81,7 @@ describe("reducer", () => {
             taipyReducer({ ...INITIAL_STATE }, {
                 type: "SET_LOCATIONS",
                 payload: { value: { loc: "loc" } },
-            } as TaipyBaseAction).locations,
+            } as TaipyBaseAction).locations
         ).toBeDefined();
     });
     it("set alert", async () => {
@@ -91,7 +91,7 @@ describe("reducer", () => {
                 atype: "i",
                 message: "message",
                 system: "system",
-            } as TaipyBaseAction).alerts,
+            } as TaipyBaseAction).alerts
         ).toHaveLength(1);
     });
     it("set show block", async () => {
@@ -100,7 +100,7 @@ describe("reducer", () => {
                 type: "SET_BLOCK",
                 action: "action",
                 message: "message",
-            } as TaipyBaseAction).block,
+            } as TaipyBaseAction).block
         ).toBeDefined();
     });
     it("set hide block", async () => {
@@ -110,7 +110,7 @@ describe("reducer", () => {
                 action: "action",
                 message: "message",
                 close: true,
-            } as TaipyBaseAction).block,
+            } as TaipyBaseAction).block
         ).toBeUndefined();
     });
     it("set navigate", async () => {
@@ -119,7 +119,7 @@ describe("reducer", () => {
                 type: "NAVIGATE",
                 to: "navigateTo",
                 tab: "_blank",
-            } as TaipyBaseAction).navigateTo,
+            } as TaipyBaseAction).navigateTo
         ).toBeDefined();
     });
     it("set client id", async () => {
@@ -130,7 +130,7 @@ describe("reducer", () => {
             taipyReducer({ ...INITIAL_STATE }, {
                 type: "ACKNOWLEDGEMENT",
                 id: "id",
-            } as TaipyBaseAction),
+            } as TaipyBaseAction)
         ).toEqual(INITIAL_STATE);
     });
     it("remove Acknowledgement", async () => {
@@ -138,7 +138,7 @@ describe("reducer", () => {
             taipyReducer({ ...INITIAL_STATE, ackList: ["ack"] }, {
                 type: "ACKNOWLEDGEMENT",
                 id: "ack",
-            } as TaipyBaseAction),
+            } as TaipyBaseAction)
         ).toEqual(INITIAL_STATE);
     });
     it("set Theme", async () => {
@@ -146,7 +146,7 @@ describe("reducer", () => {
             taipyReducer({ ...INITIAL_STATE }, {
                 type: "SET_THEME",
                 payload: { value: "dark" },
-            } as TaipyBaseAction).theme,
+            } as TaipyBaseAction).theme
         ).toBeDefined();
     });
     it("set TimeZone", async () => {
@@ -154,7 +154,7 @@ describe("reducer", () => {
             taipyReducer({ ...INITIAL_STATE }, {
                 type: "SET_TIMEZONE",
                 payload: { timeZone: "tz" },
-            } as TaipyBaseAction).timeZone,
+            } as TaipyBaseAction).timeZone
         ).toBeDefined();
     });
     it("set default TimeZone", async () => {
@@ -162,12 +162,12 @@ describe("reducer", () => {
             taipyReducer({ ...INITIAL_STATE }, {
                 type: "SET_TIMEZONE",
                 payload: {},
-            } as TaipyBaseAction).timeZone,
+            } as TaipyBaseAction).timeZone
         ).toBeDefined();
     });
     it("set Menu", async () => {
         expect(
-            taipyReducer({ ...INITIAL_STATE }, { type: "SET_MENU", menu: {} } as TaipyBaseAction).menu,
+            taipyReducer({ ...INITIAL_STATE }, { type: "SET_MENU", menu: {} } as TaipyBaseAction).menu
         ).toBeDefined();
     });
     it("sets download", async () => {
@@ -175,12 +175,12 @@ describe("reducer", () => {
             taipyReducer({ ...INITIAL_STATE }, {
                 type: "DOWNLOAD_FILE",
                 content: {},
-            } as TaipyBaseAction).download,
+            } as TaipyBaseAction).download
         ).toBeDefined();
     });
     it("resets download", async () => {
         expect(
-            taipyReducer({ ...INITIAL_STATE }, { type: "DOWNLOAD_FILE" } as TaipyBaseAction).download,
+            taipyReducer({ ...INITIAL_STATE }, { type: "DOWNLOAD_FILE" } as TaipyBaseAction).download
         ).toBeUndefined();
     });
     it("sets partial", async () => {
@@ -189,7 +189,7 @@ describe("reducer", () => {
                 type: "PARTIAL",
                 name: "partial",
                 create: true,
-            } as TaipyBaseAction).data.partial,
+            } as TaipyBaseAction).data.partial
         ).toBeDefined();
     });
     it("resets partial", async () => {
@@ -197,7 +197,7 @@ describe("reducer", () => {
             taipyReducer({ ...INITIAL_STATE, data: { partial: true } }, {
                 type: "PARTIAL",
                 name: "partial",
-            } as TaipyBaseAction).data.partial,
+            } as TaipyBaseAction).data.partial
         ).toBeUndefined();
     });
     it("creates an alert action", () => {
@@ -333,6 +333,7 @@ describe("createRequestInfiniteTableUpdateAction function", () => {
                 value: "testValue",
                 col: "yourColValue",
                 action: "yourActionValue",
+                type: "yourTypeValue",
             },
         ];
         const action = createRequestInfiniteTableUpdateAction(
@@ -354,7 +355,7 @@ describe("createRequestInfiniteTableUpdateAction function", () => {
             compare,
             compareDatas,
             stateContext,
-            reverse,
+            reverse
         );
         expect(action.type).toEqual(Types.RequestDataUpdate);
         expect(action.name).toEqual(name);
@@ -396,7 +397,7 @@ describe("createRequestTableUpdateAction function", () => {
         const tooltips = { tooltipKey: "tooltipValue" };
         const handleNan = true;
         const filters = [
-            { field: "testField", operator: "testOperator", value: "testValue", col: "testCol", action: "testAction" },
+            { field: "testField", operator: "testOperator", value: "testValue", col: "testCol", action: "testAction", type: "type" },
         ];
         const compare = "testCompare";
         const compareDatas = "testCompareDatas";
@@ -419,7 +420,7 @@ describe("createRequestTableUpdateAction function", () => {
             filters,
             compare,
             compareDatas,
-            stateContext,
+            stateContext
         );
         expect(action.type).toEqual(Types.RequestDataUpdate);
         expect(action.name).toEqual(name);
@@ -1058,7 +1059,7 @@ describe("initializeWebSocket function", () => {
                 "mockId",
                 undefined,
                 false,
-                expect.any(Function),
+                expect.any(Function)
             );
         }
     });

+ 179 - 173
frontend/taipy/package-lock.json

@@ -41,7 +41,6 @@
       }
     },
     "../../taipy/gui/webapp": {
-      "name": "taipy-gui",
       "version": "4.0.0"
     },
     "node_modules/@babel/code-frame": {
@@ -235,15 +234,15 @@
       }
     },
     "node_modules/@emotion/babel-plugin": {
-      "version": "11.11.0",
-      "resolved": "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz",
-      "integrity": "sha512-m4HEDZleaaCH+XgDDsPF15Ht6wTLsgDTeR3WYj9Q/k76JtWhrJjcP4+/XlG8LGT/Rol9qUfOIztXeA84ATpqPQ==",
+      "version": "11.12.0",
+      "resolved": "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.12.0.tgz",
+      "integrity": "sha512-y2WQb+oP8Jqvvclh8Q55gLUyb7UFvgv7eJfsj7td5TToBrIUtPay2kMrZi4xjq9qw2vD0ZR5fSho0yqoFgX7Rw==",
       "dependencies": {
         "@babel/helper-module-imports": "^7.16.7",
         "@babel/runtime": "^7.18.3",
-        "@emotion/hash": "^0.9.1",
-        "@emotion/memoize": "^0.8.1",
-        "@emotion/serialize": "^1.1.2",
+        "@emotion/hash": "^0.9.2",
+        "@emotion/memoize": "^0.9.0",
+        "@emotion/serialize": "^1.2.0",
         "babel-plugin-macros": "^3.1.0",
         "convert-source-map": "^1.5.0",
         "escape-string-regexp": "^4.0.0",
@@ -253,47 +252,47 @@
       }
     },
     "node_modules/@emotion/cache": {
-      "version": "11.11.0",
-      "resolved": "https://registry.npmjs.org/@emotion/cache/-/cache-11.11.0.tgz",
-      "integrity": "sha512-P34z9ssTCBi3e9EI1ZsWpNHcfY1r09ZO0rZbRO2ob3ZQMnFI35jB536qoXbkdesr5EUhYi22anuEJuyxifaqAQ==",
-      "dependencies": {
-        "@emotion/memoize": "^0.8.1",
-        "@emotion/sheet": "^1.2.2",
-        "@emotion/utils": "^1.2.1",
-        "@emotion/weak-memoize": "^0.3.1",
+      "version": "11.13.0",
+      "resolved": "https://registry.npmjs.org/@emotion/cache/-/cache-11.13.0.tgz",
+      "integrity": "sha512-hPV345J/tH0Cwk2wnU/3PBzORQ9HeX+kQSbwI+jslzpRCHE6fSGTohswksA/Ensr8znPzwfzKZCmAM9Lmlhp7g==",
+      "dependencies": {
+        "@emotion/memoize": "^0.9.0",
+        "@emotion/sheet": "^1.4.0",
+        "@emotion/utils": "^1.4.0",
+        "@emotion/weak-memoize": "^0.4.0",
         "stylis": "4.2.0"
       }
     },
     "node_modules/@emotion/hash": {
-      "version": "0.9.1",
-      "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.1.tgz",
-      "integrity": "sha512-gJB6HLm5rYwSLI6PQa+X1t5CFGrv1J1TWG+sOyMCeKz2ojaj6Fnl/rZEspogG+cvqbt4AE/2eIyD2QfLKTBNlQ=="
+      "version": "0.9.2",
+      "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.2.tgz",
+      "integrity": "sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g=="
     },
     "node_modules/@emotion/is-prop-valid": {
-      "version": "1.2.2",
-      "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.2.2.tgz",
-      "integrity": "sha512-uNsoYd37AFmaCdXlg6EYD1KaPOaRWRByMCYzbKUX4+hhMfrxdVSelShywL4JVaAeM/eHUOSprYBQls+/neX3pw==",
+      "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.3.0.tgz",
+      "integrity": "sha512-SHetuSLvJDzuNbOdtPVbq6yMMMlLoW5Q94uDqJZqy50gcmAjxFkVqmzqSGEFq9gT2iMuIeKV1PXVWmvUhuZLlQ==",
       "dependencies": {
-        "@emotion/memoize": "^0.8.1"
+        "@emotion/memoize": "^0.9.0"
       }
     },
     "node_modules/@emotion/memoize": {
-      "version": "0.8.1",
-      "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.8.1.tgz",
-      "integrity": "sha512-W2P2c/VRW1/1tLox0mVUalvnWXxavmv/Oum2aPsRcoDJuob75FC3Y8FbpfLwUegRcxINtGUMPq0tFCvYNTBXNA=="
+      "version": "0.9.0",
+      "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.9.0.tgz",
+      "integrity": "sha512-30FAj7/EoJ5mwVPOWhAyCX+FPfMDrVecJAM+Iw9NRoSl4BBAQeqj4cApHHUXOVvIPgLVDsCFoz/hGD+5QQD1GQ=="
     },
     "node_modules/@emotion/react": {
-      "version": "11.11.4",
-      "resolved": "https://registry.npmjs.org/@emotion/react/-/react-11.11.4.tgz",
-      "integrity": "sha512-t8AjMlF0gHpvvxk5mAtCqR4vmxiGHCeJBaQO6gncUSdklELOgtwjerNY2yuJNfwnc6vi16U/+uMF+afIawJ9iw==",
+      "version": "11.13.0",
+      "resolved": "https://registry.npmjs.org/@emotion/react/-/react-11.13.0.tgz",
+      "integrity": "sha512-WkL+bw1REC2VNV1goQyfxjx1GYJkcc23CRQkXX+vZNLINyfI7o+uUn/rTGPt/xJ3bJHd5GcljgnxHf4wRw5VWQ==",
       "dependencies": {
         "@babel/runtime": "^7.18.3",
-        "@emotion/babel-plugin": "^11.11.0",
-        "@emotion/cache": "^11.11.0",
-        "@emotion/serialize": "^1.1.3",
-        "@emotion/use-insertion-effect-with-fallbacks": "^1.0.1",
-        "@emotion/utils": "^1.2.1",
-        "@emotion/weak-memoize": "^0.3.1",
+        "@emotion/babel-plugin": "^11.12.0",
+        "@emotion/cache": "^11.13.0",
+        "@emotion/serialize": "^1.3.0",
+        "@emotion/use-insertion-effect-with-fallbacks": "^1.1.0",
+        "@emotion/utils": "^1.4.0",
+        "@emotion/weak-memoize": "^0.4.0",
         "hoist-non-react-statics": "^3.3.1"
       },
       "peerDependencies": {
@@ -306,33 +305,33 @@
       }
     },
     "node_modules/@emotion/serialize": {
-      "version": "1.1.4",
-      "resolved": "https://registry.npmjs.org/@emotion/serialize/-/serialize-1.1.4.tgz",
-      "integrity": "sha512-RIN04MBT8g+FnDwgvIUi8czvr1LU1alUMI05LekWB5DGyTm8cCBMCRpq3GqaiyEDRptEXOyXnvZ58GZYu4kBxQ==",
+      "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/@emotion/serialize/-/serialize-1.3.0.tgz",
+      "integrity": "sha512-jACuBa9SlYajnpIVXB+XOXnfJHyckDfe6fOpORIM6yhBDlqGuExvDdZYHDQGoDf3bZXGv7tNr+LpLjJqiEQ6EA==",
       "dependencies": {
-        "@emotion/hash": "^0.9.1",
-        "@emotion/memoize": "^0.8.1",
-        "@emotion/unitless": "^0.8.1",
-        "@emotion/utils": "^1.2.1",
+        "@emotion/hash": "^0.9.2",
+        "@emotion/memoize": "^0.9.0",
+        "@emotion/unitless": "^0.9.0",
+        "@emotion/utils": "^1.4.0",
         "csstype": "^3.0.2"
       }
     },
     "node_modules/@emotion/sheet": {
-      "version": "1.2.2",
-      "resolved": "https://registry.npmjs.org/@emotion/sheet/-/sheet-1.2.2.tgz",
-      "integrity": "sha512-0QBtGvaqtWi+nx6doRwDdBIzhNdZrXUppvTM4dtZZWEGTXL/XE/yJxLMGlDT1Gt+UHH5IX1n+jkXyytE/av7OA=="
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/@emotion/sheet/-/sheet-1.4.0.tgz",
+      "integrity": "sha512-fTBW9/8r2w3dXWYM4HCB1Rdp8NLibOw2+XELH5m5+AkWiL/KqYX6dc0kKYlaYyKjrQ6ds33MCdMPEwgs2z1rqg=="
     },
     "node_modules/@emotion/styled": {
-      "version": "11.11.5",
-      "resolved": "https://registry.npmjs.org/@emotion/styled/-/styled-11.11.5.tgz",
-      "integrity": "sha512-/ZjjnaNKvuMPxcIiUkf/9SHoG4Q196DRl1w82hQ3WCsjo1IUR8uaGWrC6a87CrYAW0Kb/pK7hk8BnLgLRi9KoQ==",
+      "version": "11.13.0",
+      "resolved": "https://registry.npmjs.org/@emotion/styled/-/styled-11.13.0.tgz",
+      "integrity": "sha512-tkzkY7nQhW/zC4hztlwucpT8QEZ6eUzpXDRhww/Eej4tFfO0FxQYWRyg/c5CCXa4d/f174kqeXYjuQRnhzf6dA==",
       "dependencies": {
         "@babel/runtime": "^7.18.3",
-        "@emotion/babel-plugin": "^11.11.0",
-        "@emotion/is-prop-valid": "^1.2.2",
-        "@emotion/serialize": "^1.1.4",
-        "@emotion/use-insertion-effect-with-fallbacks": "^1.0.1",
-        "@emotion/utils": "^1.2.1"
+        "@emotion/babel-plugin": "^11.12.0",
+        "@emotion/is-prop-valid": "^1.3.0",
+        "@emotion/serialize": "^1.3.0",
+        "@emotion/use-insertion-effect-with-fallbacks": "^1.1.0",
+        "@emotion/utils": "^1.4.0"
       },
       "peerDependencies": {
         "@emotion/react": "^11.0.0-rc.0",
@@ -345,27 +344,27 @@
       }
     },
     "node_modules/@emotion/unitless": {
-      "version": "0.8.1",
-      "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.8.1.tgz",
-      "integrity": "sha512-KOEGMu6dmJZtpadb476IsZBclKvILjopjUii3V+7MnXIQCYh8W3NgNcgwo21n9LXZX6EDIKvqfjYxXebDwxKmQ=="
+      "version": "0.9.0",
+      "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.9.0.tgz",
+      "integrity": "sha512-TP6GgNZtmtFaFcsOgExdnfxLLpRDla4Q66tnenA9CktvVSdNKDvMVuUah4QvWPIpNjrWsGg3qeGo9a43QooGZQ=="
     },
     "node_modules/@emotion/use-insertion-effect-with-fallbacks": {
-      "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.0.1.tgz",
-      "integrity": "sha512-jT/qyKZ9rzLErtrjGgdkMBn2OP8wl0G3sQlBb3YPryvKHsjvINUhVaPFfP+fpBcOkmrVOVEEHQFJ7nbj2TH2gw==",
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.1.0.tgz",
+      "integrity": "sha512-+wBOcIV5snwGgI2ya3u99D7/FJquOIniQT1IKyDsBmEgwvpxMNeS65Oib7OnE2d2aY+3BU4OiH+0Wchf8yk3Hw==",
       "peerDependencies": {
         "react": ">=16.8.0"
       }
     },
     "node_modules/@emotion/utils": {
-      "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/@emotion/utils/-/utils-1.2.1.tgz",
-      "integrity": "sha512-Y2tGf3I+XVnajdItskUCn6LX+VUDmP6lTL4fcqsXAv43dnlbZiuW4MWQW38rW/BVWSE7Q/7+XQocmpnRYILUmg=="
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/@emotion/utils/-/utils-1.4.0.tgz",
+      "integrity": "sha512-spEnrA1b6hDR/C68lC2M7m6ALPUHZC0lIY7jAS/B/9DuuO1ZP04eov8SMv/6fwRd8pzmsn2AuJEznRREWlQrlQ=="
     },
     "node_modules/@emotion/weak-memoize": {
-      "version": "0.3.1",
-      "resolved": "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.3.1.tgz",
-      "integrity": "sha512-EsBwpc7hBUJWAsNPBmJy4hxWx12v6bshQsldrVmjxJoc3isbxhOrF2IcCpaXxfvq03NwkI7sbsOLXbYuqF/8Ww=="
+      "version": "0.4.0",
+      "resolved": "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.4.0.tgz",
+      "integrity": "sha512-snKqtPW01tN0ui7yu9rGv69aJXr/a/Ywvl11sUjNtEcRc+ng/mQriFL0wLXMef74iHa/EkftbDzU9F8iFbH+zg=="
     },
     "node_modules/@eslint-community/eslint-utils": {
       "version": "4.4.0",
@@ -461,20 +460,20 @@
       }
     },
     "node_modules/@floating-ui/core": {
-      "version": "1.6.4",
-      "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.4.tgz",
-      "integrity": "sha512-a4IowK4QkXl4SCWTGUR0INAfEOX3wtsYw3rKK5InQEHMGObkR8Xk44qYQD9P4r6HHw0iIfK6GUKECmY8sTkqRA==",
+      "version": "1.6.5",
+      "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.5.tgz",
+      "integrity": "sha512-8GrTWmoFhm5BsMZOTHeGD2/0FLKLQQHvO/ZmQga4tKempYRLz8aqJGqXVuQgisnMObq2YZ2SgkwctN1LOOxcqA==",
       "dependencies": {
-        "@floating-ui/utils": "^0.2.4"
+        "@floating-ui/utils": "^0.2.5"
       }
     },
     "node_modules/@floating-ui/dom": {
-      "version": "1.6.7",
-      "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.7.tgz",
-      "integrity": "sha512-wmVfPG5o2xnKDU4jx/m4w5qva9FWHcnZ8BvzEe90D/RpwsJaTAVYPEPdQ8sbr/N8zZTAHlZUTQdqg8ZUbzHmng==",
+      "version": "1.6.8",
+      "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.8.tgz",
+      "integrity": "sha512-kx62rP19VZ767Q653wsP1XZCGIirkE09E0QUGNYTM/ttbbQHqcGPdSfWFxUyyNLc/W6aoJRBajOSXhP6GXjC0Q==",
       "dependencies": {
         "@floating-ui/core": "^1.6.0",
-        "@floating-ui/utils": "^0.2.4"
+        "@floating-ui/utils": "^0.2.5"
       }
     },
     "node_modules/@floating-ui/react-dom": {
@@ -490,9 +489,9 @@
       }
     },
     "node_modules/@floating-ui/utils": {
-      "version": "0.2.4",
-      "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.4.tgz",
-      "integrity": "sha512-dWO2pw8hhi+WrXq1YJy2yCuWoL20PddgGaqTgVe4cOS9Q6qklXCiA1tJEqX6BEwRNSCP84/afac9hd4MS+zEUA=="
+      "version": "0.2.5",
+      "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.5.tgz",
+      "integrity": "sha512-sTcG+QZ6fdEUObICavU+aB3Mp8HY4n14wYHdxK4fXjPmv3PXZZeY5RaguJmGyeH/CJQhX3fqKUtS4qc1LoHwhQ=="
     },
     "node_modules/@humanwhocodes/config-array": {
       "version": "0.11.14",
@@ -990,14 +989,14 @@
       }
     },
     "node_modules/@mui/x-date-pickers": {
-      "version": "7.10.0",
-      "resolved": "https://registry.npmjs.org/@mui/x-date-pickers/-/x-date-pickers-7.10.0.tgz",
-      "integrity": "sha512-mfJuKOdrrdlH5FskXl0aypRmZuVctNRwn5Xw0aMgE3n1ORCpzDSGCXd5El1/PdH3/3olT+vPFmxXKMQju5UMow==",
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@mui/x-date-pickers/-/x-date-pickers-7.11.0.tgz",
+      "integrity": "sha512-+zPWs1dwe7J1nZ2iFhTgCae31BLMYMQ2VtQfHxx21Dh6gbBRy/U7YJZg1LdhfQyE093S3e4A5uMZ6PUWdne7iA==",
       "dependencies": {
-        "@babel/runtime": "^7.24.7",
+        "@babel/runtime": "^7.24.8",
         "@mui/base": "^5.0.0-beta.40",
-        "@mui/system": "^5.16.0",
-        "@mui/utils": "^5.16.0",
+        "@mui/system": "^5.16.2",
+        "@mui/utils": "^5.16.2",
         "@types/react-transition-group": "^4.4.10",
         "clsx": "^2.1.1",
         "prop-types": "^15.8.1",
@@ -1054,15 +1053,35 @@
         }
       }
     },
+    "node_modules/@mui/x-internals": {
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@mui/x-internals/-/x-internals-7.11.0.tgz",
+      "integrity": "sha512-GqCYylKiB4cLH9tK4JweJlT2JvPjnpXjS3TEIqtHB4BcSsezhdRrMGzHOO5zCJqkasqTirJh2t6X16Qw1llr4Q==",
+      "dependencies": {
+        "@babel/runtime": "^7.24.8",
+        "@mui/utils": "^5.16.2"
+      },
+      "engines": {
+        "node": ">=14.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/mui-org"
+      },
+      "peerDependencies": {
+        "react": "^17.0.0 || ^18.0.0"
+      }
+    },
     "node_modules/@mui/x-tree-view": {
-      "version": "7.10.0",
-      "resolved": "https://registry.npmjs.org/@mui/x-tree-view/-/x-tree-view-7.10.0.tgz",
-      "integrity": "sha512-9OCAIb0wS5uuEDyjcSwSturrB4RUXBfE0UO/xpKjrMvRzCaAvxbCf2aFILP8uH9NyynYZkIGYfGnlqdAPy2OLg==",
+      "version": "7.11.0",
+      "resolved": "https://registry.npmjs.org/@mui/x-tree-view/-/x-tree-view-7.11.0.tgz",
+      "integrity": "sha512-/nk3hhTW5c4Uk2MIcIujC6w5/e5m8RbfWY0YTfRdHApmcFjeEZDX7O5pky5DojhaALopDuNebr9PlE8QYloaiw==",
       "dependencies": {
-        "@babel/runtime": "^7.24.7",
+        "@babel/runtime": "^7.24.8",
         "@mui/base": "^5.0.0-beta.40",
-        "@mui/system": "^5.16.0",
-        "@mui/utils": "^5.16.0",
+        "@mui/system": "^5.16.2",
+        "@mui/utils": "^5.16.2",
+        "@mui/x-internals": "7.11.0",
         "@types/react-transition-group": "^4.4.10",
         "clsx": "^2.1.1",
         "prop-types": "^15.8.1",
@@ -1208,9 +1227,9 @@
       "dev": true
     },
     "node_modules/@types/eslint": {
-      "version": "8.56.10",
-      "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.10.tgz",
-      "integrity": "sha512-Shavhk87gCtY2fhXDctcfS3e6FdxWkCx1iUZ9eEUbh7rTqlZT0/IzOkCOVt0fCjcFuZ9FPYfuezTBImfHCDBGQ==",
+      "version": "8.56.11",
+      "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.11.tgz",
+      "integrity": "sha512-sVBpJMf7UPo/wGecYOpk2aQya2VUGeHhe38WG7/mN5FufNSubf5VT9Uh9Uyp8/eLJpu1/tuhJ/qTo4mhSB4V4Q==",
       "dev": true,
       "dependencies": {
         "@types/estree": "*",
@@ -1273,9 +1292,9 @@
       "dev": true
     },
     "node_modules/@types/node": {
-      "version": "20.14.10",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.10.tgz",
-      "integrity": "sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==",
+      "version": "20.14.11",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.11.tgz",
+      "integrity": "sha512-kprQpL8MMeszbz6ojB5/tU8PLN4kesnN8Gjzw349rDlNgsSzg90lAVj3llK99Dh7JON+t9AuscPPFW6mPbTnSA==",
       "dev": true,
       "dependencies": {
         "undici-types": "~5.26.4"
@@ -1324,16 +1343,16 @@
       "dev": true
     },
     "node_modules/@typescript-eslint/eslint-plugin": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.16.1.tgz",
-      "integrity": "sha512-SxdPak/5bO0EnGktV05+Hq8oatjAYVY3Zh2bye9pGZy6+jwyR3LG3YKkV4YatlsgqXP28BTeVm9pqwJM96vf2A==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.17.0.tgz",
+      "integrity": "sha512-pyiDhEuLM3PuANxH7uNYan1AaFs5XE0zw1hq69JBvGvE7gSuEoQl1ydtEe/XQeoC3GQxLXyOVa5kNOATgM638A==",
       "dev": true,
       "dependencies": {
         "@eslint-community/regexpp": "^4.10.0",
-        "@typescript-eslint/scope-manager": "7.16.1",
-        "@typescript-eslint/type-utils": "7.16.1",
-        "@typescript-eslint/utils": "7.16.1",
-        "@typescript-eslint/visitor-keys": "7.16.1",
+        "@typescript-eslint/scope-manager": "7.17.0",
+        "@typescript-eslint/type-utils": "7.17.0",
+        "@typescript-eslint/utils": "7.17.0",
+        "@typescript-eslint/visitor-keys": "7.17.0",
         "graphemer": "^1.4.0",
         "ignore": "^5.3.1",
         "natural-compare": "^1.4.0",
@@ -1357,15 +1376,15 @@
       }
     },
     "node_modules/@typescript-eslint/parser": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.16.1.tgz",
-      "integrity": "sha512-u+1Qx86jfGQ5i4JjK33/FnawZRpsLxRnKzGE6EABZ40KxVT/vWsiZFEBBHjFOljmmV3MBYOHEKi0Jm9hbAOClA==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.17.0.tgz",
+      "integrity": "sha512-puiYfGeg5Ydop8eusb/Hy1k7QmOU6X3nvsqCgzrB2K4qMavK//21+PzNE8qeECgNOIoertJPUC1SpegHDI515A==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/scope-manager": "7.16.1",
-        "@typescript-eslint/types": "7.16.1",
-        "@typescript-eslint/typescript-estree": "7.16.1",
-        "@typescript-eslint/visitor-keys": "7.16.1",
+        "@typescript-eslint/scope-manager": "7.17.0",
+        "@typescript-eslint/types": "7.17.0",
+        "@typescript-eslint/typescript-estree": "7.17.0",
+        "@typescript-eslint/visitor-keys": "7.17.0",
         "debug": "^4.3.4"
       },
       "engines": {
@@ -1385,13 +1404,13 @@
       }
     },
     "node_modules/@typescript-eslint/scope-manager": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.16.1.tgz",
-      "integrity": "sha512-nYpyv6ALte18gbMz323RM+vpFpTjfNdyakbf3nsLvF43uF9KeNC289SUEW3QLZ1xPtyINJ1dIsZOuWuSRIWygw==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.17.0.tgz",
+      "integrity": "sha512-0P2jTTqyxWp9HiKLu/Vemr2Rg1Xb5B7uHItdVZ6iAenXmPo4SZ86yOPCJwMqpCyaMiEHTNqizHfsbmCFT1x9SA==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.16.1",
-        "@typescript-eslint/visitor-keys": "7.16.1"
+        "@typescript-eslint/types": "7.17.0",
+        "@typescript-eslint/visitor-keys": "7.17.0"
       },
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -1402,13 +1421,13 @@
       }
     },
     "node_modules/@typescript-eslint/type-utils": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.16.1.tgz",
-      "integrity": "sha512-rbu/H2MWXN4SkjIIyWcmYBjlp55VT+1G3duFOIukTNFxr9PI35pLc2ydwAfejCEitCv4uztA07q0QWanOHC7dA==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.17.0.tgz",
+      "integrity": "sha512-XD3aaBt+orgkM/7Cei0XNEm1vwUxQ958AOLALzPlbPqb8C1G8PZK85tND7Jpe69Wualri81PLU+Zc48GVKIMMA==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/typescript-estree": "7.16.1",
-        "@typescript-eslint/utils": "7.16.1",
+        "@typescript-eslint/typescript-estree": "7.17.0",
+        "@typescript-eslint/utils": "7.17.0",
         "debug": "^4.3.4",
         "ts-api-utils": "^1.3.0"
       },
@@ -1429,9 +1448,9 @@
       }
     },
     "node_modules/@typescript-eslint/types": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.16.1.tgz",
-      "integrity": "sha512-AQn9XqCzUXd4bAVEsAXM/Izk11Wx2u4H3BAfQVhSfzfDOm/wAON9nP7J5rpkCxts7E5TELmN845xTUCQrD1xIQ==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.17.0.tgz",
+      "integrity": "sha512-a29Ir0EbyKTKHnZWbNsrc/gqfIBqYPwj3F2M+jWE/9bqfEHg0AMtXzkbUkOG6QgEScxh2+Pz9OXe11jHDnHR7A==",
       "dev": true,
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -1442,13 +1461,13 @@
       }
     },
     "node_modules/@typescript-eslint/typescript-estree": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.16.1.tgz",
-      "integrity": "sha512-0vFPk8tMjj6apaAZ1HlwM8w7jbghC8jc1aRNJG5vN8Ym5miyhTQGMqU++kuBFDNKe9NcPeZ6x0zfSzV8xC1UlQ==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.17.0.tgz",
+      "integrity": "sha512-72I3TGq93t2GoSBWI093wmKo0n6/b7O4j9o8U+f65TVD0FS6bI2180X5eGEr8MA8PhKMvYe9myZJquUT2JkCZw==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.16.1",
-        "@typescript-eslint/visitor-keys": "7.16.1",
+        "@typescript-eslint/types": "7.17.0",
+        "@typescript-eslint/visitor-keys": "7.17.0",
         "debug": "^4.3.4",
         "globby": "^11.1.0",
         "is-glob": "^4.0.3",
@@ -1470,15 +1489,15 @@
       }
     },
     "node_modules/@typescript-eslint/utils": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.16.1.tgz",
-      "integrity": "sha512-WrFM8nzCowV0he0RlkotGDujx78xudsxnGMBHI88l5J8wEhED6yBwaSLP99ygfrzAjsQvcYQ94quDwI0d7E1fA==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.17.0.tgz",
+      "integrity": "sha512-r+JFlm5NdB+JXc7aWWZ3fKSm1gn0pkswEwIYsrGPdsT2GjsRATAKXiNtp3vgAAO1xZhX8alIOEQnNMl3kbTgJw==",
       "dev": true,
       "dependencies": {
         "@eslint-community/eslint-utils": "^4.4.0",
-        "@typescript-eslint/scope-manager": "7.16.1",
-        "@typescript-eslint/types": "7.16.1",
-        "@typescript-eslint/typescript-estree": "7.16.1"
+        "@typescript-eslint/scope-manager": "7.17.0",
+        "@typescript-eslint/types": "7.17.0",
+        "@typescript-eslint/typescript-estree": "7.17.0"
       },
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -1492,12 +1511,12 @@
       }
     },
     "node_modules/@typescript-eslint/visitor-keys": {
-      "version": "7.16.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.16.1.tgz",
-      "integrity": "sha512-Qlzzx4sE4u3FsHTPQAAQFJFNOuqtuY0LFrZHwQ8IHK705XxBiWOFkfKRWu6niB7hwfgnwIpO4jTC75ozW1PHWg==",
+      "version": "7.17.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.17.0.tgz",
+      "integrity": "sha512-RVGC9UhPOCsfCdI9pU++K4nD7to+jTcMIbXTSOcrLqUEW6gF2pU1UUbYJKc9cvcRSK1UDeMJ7pdMxf4bhMpV/A==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.16.1",
+        "@typescript-eslint/types": "7.17.0",
         "eslint-visitor-keys": "^3.4.3"
       },
       "engines": {
@@ -1937,18 +1956,6 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
-    "node_modules/array.prototype.toreversed": {
-      "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/array.prototype.toreversed/-/array.prototype.toreversed-1.1.2.tgz",
-      "integrity": "sha512-wwDCoT4Ck4Cz7sLtgUmzR5UV3YF5mFHUlbChCzZBQZ+0m2cl/DH3tKgvphv1nKgFsJ48oCSg6p91q2Vm0I/ZMA==",
-      "dev": true,
-      "dependencies": {
-        "call-bind": "^1.0.2",
-        "define-properties": "^1.2.0",
-        "es-abstract": "^1.22.1",
-        "es-shim-unscopables": "^1.0.0"
-      }
-    },
     "node_modules/array.prototype.tosorted": {
       "version": "1.1.4",
       "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.4.tgz",
@@ -2109,9 +2116,9 @@
       }
     },
     "node_modules/caniuse-lite": {
-      "version": "1.0.30001642",
-      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001642.tgz",
-      "integrity": "sha512-3XQ0DoRgLijXJErLSl+bLnJ+Et4KqV1PY6JJBGAFlsNsz31zeAIncyeZfLCabHK/jtSh+671RM9YMldxjUPZtA==",
+      "version": "1.0.30001643",
+      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001643.tgz",
+      "integrity": "sha512-ERgWGNleEilSrHM6iUz/zJNSQTP8Mr21wDWpdgvRwcTXGAq6jMtOUPP4dqFPTdKqZ2wKTdtB+uucZ3MRpAUSmg==",
       "dev": true,
       "funding": [
         {
@@ -2450,9 +2457,9 @@
       }
     },
     "node_modules/electron-to-chromium": {
-      "version": "1.4.828",
-      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.828.tgz",
-      "integrity": "sha512-QOIJiWpQJDHAVO4P58pwb133Cwee0nbvy/MV1CwzZVGpkH1RX33N3vsaWRCpR6bF63AAq366neZrRTu7Qlsbbw==",
+      "version": "1.5.0",
+      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.0.tgz",
+      "integrity": "sha512-Vb3xHHYnLseK8vlMJQKJYXJ++t4u1/qJ3vykuVrVjvdiOEhYyT1AuP4x03G8EnPmYvYOhe9T+dADTmthjRQMkA==",
       "dev": true
     },
     "node_modules/enhanced-resolve": {
@@ -2728,15 +2735,14 @@
       }
     },
     "node_modules/eslint-plugin-react": {
-      "version": "7.34.4",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.34.4.tgz",
-      "integrity": "sha512-Np+jo9bUwJNxCsT12pXtrGhJgT3T44T1sHhn1Ssr42XFn8TES0267wPGo5nNrMHi8qkyimDAX2BUmkf9pSaVzA==",
+      "version": "7.35.0",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.35.0.tgz",
+      "integrity": "sha512-v501SSMOWv8gerHkk+IIQBkcGRGrO2nfybfj5pLxuJNFTPxxA3PSryhXTK+9pNbtkggheDdsC0E9Q8CuPk6JKA==",
       "dev": true,
       "dependencies": {
         "array-includes": "^3.1.8",
         "array.prototype.findlast": "^1.2.5",
         "array.prototype.flatmap": "^1.3.2",
-        "array.prototype.toreversed": "^1.1.2",
         "array.prototype.tosorted": "^1.1.4",
         "doctrine": "^2.1.0",
         "es-iterator-helpers": "^1.0.19",
@@ -2757,7 +2763,7 @@
         "node": ">=4"
       },
       "peerDependencies": {
-        "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8"
+        "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7"
       }
     },
     "node_modules/eslint-plugin-react-hooks": {
@@ -3581,9 +3587,9 @@
       }
     },
     "node_modules/import-local": {
-      "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz",
-      "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==",
+      "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz",
+      "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==",
       "dev": true,
       "dependencies": {
         "pkg-dir": "^4.2.0",
@@ -3725,9 +3731,9 @@
       }
     },
     "node_modules/is-core-module": {
-      "version": "2.14.0",
-      "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.14.0.tgz",
-      "integrity": "sha512-a5dFJih5ZLYlRtDc0dZWP7RiKr6xIKzmn/oAYCDvdLThadVgyJwlaoQPmRtMSpz+rk0OGAgIu+TcM9HUF0fk1A==",
+      "version": "2.15.0",
+      "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.0.tgz",
+      "integrity": "sha512-Dd+Lb2/zvk9SKy1TGCt1wFJFo/MWBPMX5x7KcvLajWTGuomczdQX61PvY5yK6SVACwpoexWo81IfFyoKY2QnTA==",
       "dependencies": {
         "hasown": "^2.0.2"
       },
@@ -4414,9 +4420,9 @@
       "dev": true
     },
     "node_modules/node-releases": {
-      "version": "2.0.14",
-      "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz",
-      "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==",
+      "version": "2.0.18",
+      "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz",
+      "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==",
       "dev": true
     },
     "node_modules/normalize-path": {
@@ -5154,9 +5160,9 @@
       "dev": true
     },
     "node_modules/semver": {
-      "version": "7.6.2",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz",
-      "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==",
+      "version": "7.6.3",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
+      "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==",
       "dev": true,
       "bin": {
         "semver": "bin/semver.js"
@@ -5795,9 +5801,9 @@
       }
     },
     "node_modules/typescript": {
-      "version": "5.5.3",
-      "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz",
-      "integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==",
+      "version": "5.5.4",
+      "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz",
+      "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==",
       "dev": true,
       "bin": {
         "tsc": "bin/tsc",

+ 6 - 6
frontend/taipy/src/CoreSelector.tsx

@@ -509,10 +509,10 @@ const CoreSelector = (props: CoreSelectorProps) => {
     // filters
     const colFilters = useMemo(() => {
         try {
-            const res = props.filter ? (JSON.parse(props.filter) as Array<[string, string, string[]]>) : undefined;
+            const res = props.filter ? (JSON.parse(props.filter) as Array<[string, string, string, string[]]>) : undefined;
             return Array.isArray(res)
-                ? res.reduce((pv, [name, coltype, lov], idx) => {
-                      pv[name] = { dfid: name, type: coltype, index: idx, filter: true, lov: lov, freeLov: !!lov };
+                ? res.reduce((pv, [name, id, coltype, lov], idx) => {
+                      pv[name] = { dfid: id, title: name, type: coltype, index: idx, filter: true, lov: lov, freeLov: !!lov };
                       return pv;
                   }, {} as Record<string, ColumnDesc>)
                 : undefined;
@@ -554,10 +554,10 @@ const CoreSelector = (props: CoreSelectorProps) => {
     // sort
     const colSorts = useMemo(() => {
         try {
-            const res = props.sort ? (JSON.parse(props.sort) as Array<[string]>) : undefined;
+            const res = props.sort ? (JSON.parse(props.sort) as Array<[string, string]>) : undefined;
             return Array.isArray(res)
-                ? res.reduce((pv, [name], idx) => {
-                      pv[name] = { dfid: name, type: "str", index: idx };
+                ? res.reduce((pv, [name, id], idx) => {
+                      pv[name] = { dfid: id, title: name, type: "str", index: idx };
                       return pv;
                   }, {} as Record<string, ColumnDesc>)
                 : undefined;

+ 22 - 5
frontend/taipy/src/ScenarioViewer.tsx

@@ -366,7 +366,13 @@ const ScenarioViewer = (props: ScenarioViewerProps) => {
             }
         }
         setValid(!!sc);
-        setScenario((oldSc) => (oldSc === sc ? oldSc : sc ? (deepEqual(oldSc, sc) ? oldSc : sc) : invalidScenario));
+        setScenario((oldSc) => {
+            if (oldSc === sc || (sc && deepEqual(oldSc, sc))) {
+                return oldSc;
+            }
+            setSubmissionStatus(-1);
+            return sc || invalidScenario;
+        });
     }, [props.scenario, props.defaultScenario]);
 
     const [
@@ -397,7 +403,18 @@ const ScenarioViewer = (props: ScenarioViewerProps) => {
     const onDeleteScenario = useCallback(() => {
         setDeleteDialogOpen(false);
         if (valid) {
-            dispatch(createSendActionNameAction(id, module, { action: props.onDelete, error_id: getUpdateVar(updateScVars, "error_id") }, undefined, undefined, true, true, { id: scId }));
+            dispatch(
+                createSendActionNameAction(
+                    id,
+                    module,
+                    { action: props.onDelete, error_id: getUpdateVar(updateScVars, "error_id") },
+                    undefined,
+                    undefined,
+                    true,
+                    true,
+                    { id: scId }
+                )
+            );
         }
     }, [valid, props.onDelete, scId, id, dispatch, module, updateScVars]);
 
@@ -424,6 +441,9 @@ const ScenarioViewer = (props: ScenarioViewerProps) => {
         [expandable]
     );
 
+    // Submission status
+    const [submissionStatus, setSubmissionStatus] = useState(-1);
+
     // submits
     const submitSequence = useCallback(
         (label: string) => {
@@ -577,9 +597,6 @@ const ScenarioViewer = (props: ScenarioViewerProps) => {
 
     const addSequenceHandler = useCallback(() => setSequences((seq) => [...seq, ["", [], "", true]]), []);
 
-    // Submission status
-    const [submissionStatus, setSubmissionStatus] = useState(-1);
-
     // on scenario change
     useEffect(() => {
         showTags && setTags(scTags);

+ 5 - 5
taipy/_cli/_scaffold_cli.py → taipy/_cli/_create_cli.py

@@ -22,11 +22,11 @@ from ._base_cli._abstract_cli import _AbstractCLI
 from ._base_cli._taipy_parser import _TaipyParser
 
 
-class _ScaffoldCLI(_AbstractCLI):
+class _CreateCLI(_AbstractCLI):
     _template_map: Dict[str, str] = {}
 
     _COMMAND_NAME = "create"
-    _ARGUMENTS = ["--template"]
+    _ARGUMENTS = ["--application"]
 
     @classmethod
     def generate_template_map(cls, template_path: Optional[pathlib.Path] = None):
@@ -49,10 +49,10 @@ class _ScaffoldCLI(_AbstractCLI):
             help="Create a new Taipy application using pre-defined templates.",
         )
         create_parser.add_argument(
-            "--template",
+            "--application",
             choices=list(cls._template_map.keys()),
             default="default",
-            help="The Taipy template to create new application.",
+            help="The template used to create the new Taipy application.",
         )
 
     @classmethod
@@ -61,7 +61,7 @@ class _ScaffoldCLI(_AbstractCLI):
         if not args:
             return
         try:
-            cookiecutter(cls._template_map[args.template])
+            cookiecutter(cls._template_map[args.application])
         except OutputDirExistsException as err:
             error_msg = f"{str(err)}. Please remove the existing directory or provide a new folder name."
             print(error_msg)  # noqa: T201

+ 4 - 4
taipy/_entrypoint.py

@@ -19,9 +19,9 @@ from taipy.core._entity._migrate_cli import _MigrateCLI
 from taipy.core._version._cli._version_cli import _VersionCLI
 from taipy.gui._gui_cli import _GuiCLI
 
+from ._cli._create_cli import _CreateCLI
 from ._cli._help_cli import _HelpCLI
 from ._cli._run_cli import _RunCLI
-from ._cli._scaffold_cli import _ScaffoldCLI
 from .version import _get_version
 
 
@@ -46,8 +46,8 @@ def _entrypoint():
     _CoreCLI.create_run_parser()
 
     _VersionCLI.create_parser()
-    _ScaffoldCLI.generate_template_map()
-    _ScaffoldCLI.create_parser()
+    _CreateCLI.generate_template_map()
+    _CreateCLI.create_parser()
     _MigrateCLI.create_parser()
     _HelpCLI.create_parser()
 
@@ -65,7 +65,7 @@ def _entrypoint():
     _HelpCLI.handle_command()
     _VersionCLI.handle_command()
     _MigrateCLI.handle_command()
-    _ScaffoldCLI.handle_command()
+    _CreateCLI.handle_command()
 
     _TaipyParser._remove_argument("help")
     _TaipyParser._parser.print_help()

+ 0 - 1
taipy/core/_entity/_migrate/__init__.py

@@ -11,4 +11,3 @@
 
 from ._migrate_fs import _migrate_fs_entities, _remove_backup_file_entities, _restore_migrate_file_entities
 from ._migrate_mongo import _migrate_mongo_entities, _remove_backup_mongo_entities, _restore_migrate_mongo_entities
-from ._migrate_sql import _migrate_sql_entities, _remove_backup_sql_entities, _restore_migrate_sql_entities

+ 0 - 205
taipy/core/_entity/_migrate/_migrate_sql.py

@@ -1,205 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import json
-import os
-import shutil
-import sqlite3
-from contextlib import closing
-from typing import Dict, Tuple
-
-from taipy.logger._taipy_logger import _TaipyLogger
-
-from ._utils import _migrate
-
-__logger = _TaipyLogger._get_logger()
-
-
-def _load_all_entities_from_sql(db_file: str) -> Tuple[Dict, Dict]:
-    conn = sqlite3.connect(db_file)
-    with closing(conn):
-        query = "SELECT model_id, document FROM taipy_model"
-        query_version = "SELECT * FROM taipy_version"
-        cursor = conn.execute(query)
-        entities = {}
-        versions = {}
-
-        for row in cursor:
-            _id = row[0]
-            document = row[1]
-            entities[_id] = {"data": json.loads(document)}
-
-        cursor = conn.execute(query_version)
-        for row in cursor:
-            id = row[0]
-            config_id = row[1]
-            creation_date = row[2]
-            is_production = row[3]
-            is_development = row[4]
-            is_latest = row[5]
-            versions[id] = {
-                "config_id": config_id,
-                "creation_date": creation_date,
-                "is_production": is_production,
-                "is_development": is_development,
-                "is_latest": is_latest,
-            }
-
-    return entities, versions
-
-
-def __insert_scenario(scenario: dict, conn):
-    query = f"""
-        INSERT INTO scenario (id, config_id, tasks, additional_data_nodes, creation_date, primary_scenario, subscribers,
-         tags, version, pipelines, cycle)
-        VALUES ({scenario['id']}, {scenario['config_id']}, {scenario['tasks']}, {scenario['additional_data_nodes']},
-        {scenario['creation_date']}, {scenario['primary_scenario']}, {scenario['subscribers']}, {scenario['tags']},
-        {scenario['version']}, {scenario['pipelines']}, {scenario['cycle']})
-        """
-    conn.execute(query)
-    conn.commit()
-
-
-def __insert_task(task: dict, conn):
-    query = f"""
-        INSERT INTO task (id, owner_id, parent_ids, config_id, input_ids, function_name, function_module, output_ids,
-        version, skippable, properties)
-        VALUES ({task['id']}, {task['owner_id']}, {task['parent_ids']}, {task['config_id']}, {task['input_ids']},
-         {task['function_name']}, {task['function_module']}, {task['output_ids']}, {task['version']},
-         {task['skippable']}, {task['properties']})
-    """
-    conn.execute(query)
-    conn.commit()
-
-
-def __insert_datanode(datanode: dict, conn):
-    query = f"""
-        INSERT INTO data_node (id, config_id, scope, storage_type, name, owner_id, parent_ids, last_edit_date, edits,
-        version, validity_days, validity_seconds, edit_in_progress, data_node_properties)
-        VALUES ({datanode['id']}, {datanode['config_id']}, {datanode['scope']}, {datanode['storage_type']},
-        {datanode['name']}, {datanode['owner_id']}, {datanode['parent_ids']}, {datanode['last_edit_date']},
-        {datanode['edits']}, {datanode['version']}, {datanode['validity_days']}, {datanode['validity_seconds']},
-        {datanode['edit_in_progress']}, {datanode['data_node_properties']})
-    """
-    conn.execute(query)
-    conn.commit()
-
-
-def __insert_job(job: dict, conn):
-    query = f"""
-        INSERT INTO job (id, task_id, status, force, submit_id, submit_entity_id, creation_date, subscribers,
-        stacktrace, version)
-        VALUES ({job['id']}, {job['task_id']}, {job['status']}, {job['force']}, {job['submit_id']},
-        {job['submit_entity_id']}, {job['creation_date']}, {job['subscribers']}, {job['stacktrace']}, {job['version']})
-    """
-    conn.execute(query)
-    conn.commit()
-
-
-def __insert_cycle(cycle: dict, conn):
-    query = f"""
-        INSERT INTO scenario (id, name, frequency, properties, creation_date, start_date, end_date)
-        VALUES ({cycle['id']}, {cycle['name']}, {cycle['frequency']}, {cycle['properties']}, {cycle['creation_date']},
-        {cycle['start_date']}, {cycle['end_date']})
-    """
-    conn.execute(query)
-    conn.commit()
-
-
-def __insert_version(version: dict, conn):
-    query = f"""
-        INSERT INTO version (id, config_id, creation_date, is_production, is_development, is_latest)
-        VALUES ({version['id']}, {version['config_id']}, {version['creation_date']}, {version['is_production']},
-        {version['is_development']}, {version['is_latest']})
-    """
-    conn.execute(query)
-    conn.commit()
-
-
-def __write_entities_to_sql(_entities: Dict, _versions: Dict, db_file: str):
-    conn = sqlite3.connect(db_file)
-    with closing(conn):
-        for k, entity in _entities.items():
-            if "SCENARIO" in k:
-                __insert_scenario(entity["data"], conn)
-            elif "TASK" in k:
-                __insert_task(entity["data"], conn)
-            elif "DATANODE" in k:
-                __insert_datanode(entity["data"], conn)
-            elif "JOB" in k:
-                __insert_job(entity["data"], conn)
-            elif "CYCLE" in k:
-                __insert_cycle(entity["data"], conn)
-
-        for _, version in _versions.items():
-            __insert_version(version, conn)
-
-
-def _restore_migrate_sql_entities(path: str) -> bool:
-    file_name, file_extension = path.rsplit(".", 1)
-    backup_path = f"{file_name}_backup.{file_extension}"
-
-    if not os.path.exists(backup_path):
-        __logger.error(f"The backup database '{backup_path}' does not exist.")
-        return False
-
-    if os.path.exists(path):
-        os.remove(path)
-    else:
-        __logger.warning(f"The original entities database '{path}' does not exist.")
-
-    os.rename(backup_path, path)
-    __logger.info(f"Restored entities from the backup database '{backup_path}' to '{path}'.")
-    return True
-
-
-def _remove_backup_sql_entities(path: str) -> bool:
-    file_name, file_extension = path.rsplit(".", 1)
-    backup_path = f"{file_name}_backup.{file_extension}"
-    if not os.path.exists(backup_path):
-        __logger.error(f"The backup database '{backup_path}' does not exist.")
-        return False
-
-    os.remove(backup_path)
-    __logger.info(f"Removed backup entities from the backup database '{backup_path}'.")
-    return True
-
-
-def _migrate_sql_entities(path: str, backup: bool = True) -> bool:
-    """Migrate entities from sqlite database to the current version.
-
-    Args:
-        path (str): The path to the sqlite database.
-        backup (bool, optional): Whether to backup the entities before migrating. Defaults to True.
-
-    Returns:
-        bool: True if the migration was successful, False otherwise.
-    """
-    if not path:
-        __logger.error("Missing the required sqlite path.")
-        return False
-    if not os.path.exists(path):
-        __logger.error(f"File '{path}' does not exist.")
-        return False
-
-    if backup:
-        file_name, file_extension = path.rsplit(".", 1)
-        shutil.copyfile(path, f"{file_name}_backup.{file_extension}")
-        __logger.info(f"Backed up entities from '{path}' to '{file_name}_backup.{file_extension}' before migration.")
-
-    __logger.info(f"Starting entity migration from sqlite database '{path}'")
-
-    entities, versions = _load_all_entities_from_sql(path)
-    entities, versions = _migrate(entities, versions)
-    __write_entities_to_sql(entities, versions, path)
-
-    __logger.info("Migration finished")
-    return True

+ 2 - 15
taipy/core/_entity/_migrate_cli.py

@@ -19,13 +19,10 @@ from taipy.config.config import Config
 from ._migrate import (
     _migrate_fs_entities,
     _migrate_mongo_entities,
-    _migrate_sql_entities,
     _remove_backup_file_entities,
     _remove_backup_mongo_entities,
-    _remove_backup_sql_entities,
     _restore_migrate_file_entities,
     _restore_migrate_mongo_entities,
-    _restore_migrate_sql_entities,
 )
 
 
@@ -43,8 +40,8 @@ class _MigrateCLI(_AbstractCLI):
         migrate_parser.add_argument(
             "--repository-type",
             nargs="+",
-            help="The type of repository to migrate. If filesystem or sql, a path to the database folder/.sqlite file "
-            "should be informed. In case of mongo host, port, user and password must be informed, if left empty it "
+            help="The type of repository to migrate. If filesystem, a path to the database folder should be informed. "
+            "In case of mongo, the host, port, user and password must be informed; if left empty, "
+            "default values are assumed",
         )
         migrate_parser.add_argument(
@@ -92,9 +89,6 @@ class _MigrateCLI(_AbstractCLI):
             path = repository_args[0] or Config.core.taipy_storage_folder
             if not _remove_backup_file_entities(path):
                 sys.exit(1)
-        elif repository_type == "sql":
-            if not _remove_backup_sql_entities(repository_args[0]):
-                sys.exit(1)
         elif repository_type == "mongo":
             if not _remove_backup_mongo_entities():
                 sys.exit(1)
@@ -110,9 +104,6 @@ class _MigrateCLI(_AbstractCLI):
             path = repository_args[0] or Config.core.taipy_storage_folder
             if not _restore_migrate_file_entities(path):
                 sys.exit(1)
-        elif repository_type == "sql":
-            if not _restore_migrate_sql_entities(repository_args[0]):
-                sys.exit(1)
         elif repository_type == "mongo":
             mongo_args = repository_args[1:5] if repository_args[0] else []
             if not _restore_migrate_mongo_entities(*mongo_args):
@@ -129,10 +120,6 @@ class _MigrateCLI(_AbstractCLI):
             if not _migrate_fs_entities(path, do_backup):
                 sys.exit(1)
 
-        elif repository_type == "sql":
-            if not _migrate_sql_entities(repository_args[0], do_backup):
-                sys.exit(1)
-
         elif repository_type == "mongo":
             mongo_args = repository_args[1:5] if repository_args[0] else []
             _migrate_mongo_entities(*mongo_args, backup=do_backup)  # type: ignore

+ 0 - 4
taipy/core/_repository/_base_taipy_model.py

@@ -14,15 +14,11 @@ import enum
 import json
 from typing import Any, Dict
 
-from sqlalchemy import Table
-
 from ._decoder import _Decoder
 from ._encoder import _Encoder
 
 
 class _BaseModel:
-    __table__: Table
-
     def __iter__(self):
         yield from self.__dict__.items()
 

+ 0 - 236
taipy/core/_repository/_sql_repository.py

@@ -1,236 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import json
-import pathlib
-from sqlite3 import DatabaseError
-from typing import Any, Dict, Iterable, List, Optional, Type, Union
-
-from sqlalchemy.dialects import sqlite
-from sqlalchemy.exc import NoResultFound, OperationalError
-
-from ...logger._taipy_logger import _TaipyLogger
-from .._repository._abstract_repository import _AbstractRepository
-from ..common._utils import _retry_repository_operation
-from ..common.typing import Converter, Entity, ModelType
-from ..exceptions import ModelNotFound
-from .db._sql_connection import _SQLConnection
-
-
-class _SQLRepository(_AbstractRepository[ModelType, Entity]):
-    __EXCEPTIONS_TO_RETRY = (OperationalError,)
-    _logger = _TaipyLogger._get_logger()
-
-    def __init__(self, model_type: Type[ModelType], converter: Type[Converter]):
-        """
-        Holds common methods to be used and extended when the need for saving
-        dataclasses in a sqlite database.
-
-        Some lines have type: ignore because MyPy won't recognize some generic attributes. This
-        should be revised in the future.
-
-        Attributes:
-            model_type: Generic dataclass.
-            converter: A class that handles conversion to and from a database backend
-            db: An sqlite3 session object
-        """
-        self.db = _SQLConnection.init_db()
-        self.model_type = model_type
-        self.converter = converter
-        self.table = self.model_type.__table__
-
-    ###############################
-    # ##   Inherited methods   ## #
-    ###############################
-    def _save(self, entity: Entity):
-        obj = self.converter._entity_to_model(entity)
-        if self._exists(entity.id):  # type: ignore
-            try:
-                self._update_entry(obj)
-                return
-            except DatabaseError as e:
-                self._logger.error(f"Error while updating {entity.id} in {self.table.name}. ")  # type: ignore
-                self._logger.error(f"Error : {e}")
-                raise e
-        try:
-            self.__insert_model(obj)
-        except DatabaseError as e:
-            self._logger.error(f"Error while inserting {entity.id} into {self.table.name}. ")  # type: ignore
-            self._logger.error(f"Error : {e}")
-            raise e
-
-    def _exists(self, entity_id: str):
-        query = self.table.select().filter_by(id=entity_id)
-        return bool(self.db.execute(str(query), [entity_id]).fetchone())
-
-    @_retry_repository_operation(__EXCEPTIONS_TO_RETRY)
-    def _load(self, entity_id: str) -> Entity:
-        query = self.table.select().filter_by(id=entity_id)
-
-        if entry := self.db.execute(str(query.compile(dialect=sqlite.dialect())), [entity_id]).fetchone():
-            entry = self.model_type.from_dict(entry)
-            return self.converter._model_to_entity(entry)
-        raise ModelNotFound(str(self.model_type.__name__), entity_id)
-
-    def _load_all(self, filters: Optional[List[Dict]] = None) -> List[Entity]:
-        query = self.table.select()
-        entities: List[Entity] = []
-
-        for f in filters or [{}]:
-            filtered_query = query.filter_by(**f)
-            try:
-                entries = self.db.execute(
-                    str(filtered_query.compile(dialect=sqlite.dialect())),
-                    [self.__serialize_filter_values(val) for val in list(f.values())],
-                ).fetchall()
-
-                entities.extend([self.converter._model_to_entity(self.model_type.from_dict(m)) for m in entries])
-            except NoResultFound:
-                continue
-        return entities
-
-    def _delete(self, entity_id: str):
-        delete_query = self.table.delete().filter_by(id=entity_id)
-        cursor = self.db.execute(str(delete_query.compile(dialect=sqlite.dialect())), [entity_id])
-
-        if cursor.rowcount == 0:
-            raise ModelNotFound(str(self.model_type.__name__), entity_id)
-
-        self.db.commit()
-
-    def _delete_all(self):
-        self.db.execute(str(self.table.delete().compile(dialect=sqlite.dialect())))
-        self.db.commit()
-
-    def _delete_many(self, ids: Iterable[str]):
-        for entity_id in ids:
-            self._delete(entity_id)
-
-    def _delete_by(self, attribute: str, value: str):
-        delete_by_query = self.table.delete().filter_by(**{attribute: value})
-
-        self.db.execute(str(delete_by_query.compile(dialect=sqlite.dialect())), [value])
-        self.db.commit()
-
-    def _search(self, attribute: str, value: Any, filters: Optional[List[Dict]] = None) -> List[Entity]:
-        query = self.table.select().filter_by(**{attribute: value})
-
-        entities: List[Entity] = []
-        for f in filters or [{}]:
-            entries = self.db.execute(
-                str(query.filter_by(**f).compile(dialect=sqlite.dialect())),
-                [value] + [self.__serialize_filter_values(val) for val in list(f.values())],
-            ).fetchall()
-            entities.extend([self.converter._model_to_entity(self.model_type.from_dict(m)) for m in entries])
-
-        return entities
-
-    def _export(self, entity_id: str, folder_path: Union[str, pathlib.Path]):
-        if isinstance(folder_path, str):
-            folder: pathlib.Path = pathlib.Path(folder_path)
-        else:
-            folder = folder_path
-
-        export_dir = folder / self.table.name
-        if not export_dir.exists():
-            export_dir.mkdir(parents=True)
-
-        export_path = export_dir / f"{entity_id}.json"
-
-        query = self.table.select().filter_by(id=entity_id)
-
-        if entry := self.db.execute(str(query.compile(dialect=sqlite.dialect())), [entity_id]).fetchone():
-            with open(export_path, "w", encoding="utf-8") as export_file:
-                export_file.write(json.dumps(entry))
-        else:
-            raise ModelNotFound(self.model_type, entity_id)  # type: ignore
-
-    ###########################################
-    # ##   Specific or optimized methods   ## #
-    ###########################################
-    def _get_multi(self, *, skip: int = 0, limit: int = 100) -> List[ModelType]:
-        query = self.table.select().offset(skip).limit(limit)
-        return self.db.execute(str(query.compile(dialect=sqlite.dialect()))).fetchall()
-
-    def _get_by_config(self, config_id: Any) -> Optional[ModelType]:
-        query = self.table.select().filter_by(config_id=config_id)
-        return self.db.execute(str(query.compile(dialect=sqlite.dialect())), [config_id]).fetchall()
-
-    def _get_by_config_and_owner_id(
-        self, config_id: str, owner_id: Optional[str], filters: Optional[List[Dict]] = None
-    ) -> Optional[Entity]:
-        if not filters:
-            filters = [{}]
-        if entry := self.__get_entities_by_config_and_owner(config_id, owner_id, filters):
-            return self.converter._model_to_entity(entry)
-        return None
-
-    def _get_by_configs_and_owner_ids(self, configs_and_owner_ids, filters: Optional[List[Dict]] = None):
-        # Design in order to optimize performance on Entity creation.
-        # Maintainability and readability were impacted.
-        if not filters:
-            filters = [{}]
-        res = {}
-        configs_and_owner_ids = set(configs_and_owner_ids)
-
-        for config, owner in configs_and_owner_ids:
-            if entry := self.__get_entities_by_config_and_owner(config.id, owner, filters):
-                entity = self.converter._model_to_entity(entry)
-                key = config, owner
-                res[key] = entity
-
-        return res
-
-    def __get_entities_by_config_and_owner(
-        self, config_id: str, owner_id: Optional[str] = None, filters: Optional[List[Dict]] = None
-    ) -> Optional[ModelType]:
-        if not filters:
-            filters = []
-        versions = [item.get("version") for item in filters if item.get("version")]
-
-        query = self.table.select().filter_by(config_id=config_id)
-        parameters: List = [config_id]
-
-        if owner_id:
-            parameters.append(owner_id)
-        query = query.filter_by(owner_id=owner_id)
-        query = str(query.compile(dialect=sqlite.dialect()))
-
-        if versions:
-            table_name = self.table.name
-            query += f" AND {table_name}.version IN ({','.join(['?'] * len(versions))})"
-            parameters.extend(versions)
-
-        if entry := self.db.execute(query, parameters).fetchone():
-            return self.model_type.from_dict(entry)
-        return None
-
-    #############################
-    # ##   Private methods   ## #
-    #############################
-    @_retry_repository_operation(__EXCEPTIONS_TO_RETRY)
-    def __insert_model(self, model: ModelType):
-        query = self.table.insert()
-        self.db.execute(str(query.compile(dialect=sqlite.dialect())), model.to_list())
-        self.db.commit()
-
-    @_retry_repository_operation(__EXCEPTIONS_TO_RETRY)
-    def _update_entry(self, model):
-        query = self.table.update().filter_by(id=model.id)
-        cursor = self.db.execute(str(query.compile(dialect=sqlite.dialect())), model.to_list() + [model.id])
-        self.db.commit()
-        cursor.close()
-
-    @staticmethod
-    def __serialize_filter_values(value):
-        if isinstance(value, (dict, list)):
-            return json.dumps(value).replace('"', "'")
-        return value

+ 0 - 10
taipy/core/_repository/db/__init__.py

@@ -1,10 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.

+ 0 - 77
taipy/core/_repository/db/_sql_connection.py

@@ -1,77 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import sqlite3
-from functools import lru_cache
-from sqlite3 import Connection
-
-from sqlalchemy.dialects import sqlite
-from sqlalchemy.schema import CreateTable
-
-from taipy.config.config import Config
-
-from ...exceptions import MissingRequiredProperty
-
-
-def dict_factory(cursor, row):
-    return {col[0]: row[idx] for idx, col in enumerate(cursor.description)}
-
-
-class _SQLConnection:
-    _connection = None
-
-    @classmethod
-    def init_db(cls):
-        if cls._connection:
-            return cls._connection
-
-        cls._connection = _build_connection()
-        cls._connection.row_factory = dict_factory
-
-        from ..._version._version_model import _VersionModel
-        from ...cycle._cycle_model import _CycleModel
-        from ...data._data_model import _DataNodeModel
-        from ...job._job_model import _JobModel
-        from ...scenario._scenario_model import _ScenarioModel
-        from ...submission._submission_model import _SubmissionModel
-        from ...task._task_model import _TaskModel
-
-        cursor = cls._connection.cursor()
-        cursor.execute(str(CreateTable(_CycleModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect())))
-        cursor.execute(str(CreateTable(_DataNodeModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect())))
-        cursor.execute(str(CreateTable(_JobModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect())))
-        cursor.execute(str(CreateTable(_ScenarioModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect())))
-        cursor.execute(str(CreateTable(_TaskModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect())))
-        cursor.execute(str(CreateTable(_VersionModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect())))
-        cursor.execute(
-            str(CreateTable(_SubmissionModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect()))
-        )
-        cursor.close()
-
-        return cls._connection
-
-
-def _build_connection() -> Connection:
-    # Set SQLite threading mode to Serialized, means that threads may share the module, connections and cursors
-    sqlite3.threadsafety = 3
-
-    properties = Config.core.repository_properties
-    try:
-        db_location = properties["db_location"]
-    except KeyError:
-        raise MissingRequiredProperty("Missing property db_location.") from None
-
-    return __build_connection(db_location)
-
-
-@lru_cache
-def __build_connection(db_location: str):
-    return sqlite3.connect(db_location, check_same_thread=False, timeout=20)

+ 1 - 2
taipy/core/_version/_version_manager_factory.py

@@ -15,11 +15,10 @@ from .._manager._manager_factory import _ManagerFactory
 from ..common import _utils
 from ._version_fs_repository import _VersionFSRepository
 from ._version_manager import _VersionManager
-from ._version_sql_repository import _VersionSQLRepository
 
 
 class _VersionManagerFactory(_ManagerFactory):
-    __REPOSITORY_MAP = {"default": _VersionFSRepository, "sql": _VersionSQLRepository}
+    __REPOSITORY_MAP = {"default": _VersionFSRepository}
 
     @classmethod
     def _build_manager(cls) -> Type[_VersionManager]:

+ 1 - 25
taipy/core/_version/_version_model.py

@@ -12,25 +12,11 @@
 from dataclasses import dataclass
 from typing import Any, Dict
 
-from sqlalchemy import Boolean, Column, String, Table
-
 from .._repository._base_taipy_model import _BaseModel
-from .._repository.db._sql_base_model import mapper_registry
 
 
-@mapper_registry.mapped
 @dataclass
 class _VersionModel(_BaseModel):
-    __table__ = Table(
-        "version",
-        mapper_registry.metadata,
-        Column("id", String, primary_key=True),
-        Column("config", String),  # config is store as a json string
-        Column("creation_date", String),
-        Column("is_production", Boolean),
-        Column("is_development", Boolean),
-        Column("is_latest", Boolean),
-    )
     id: str
     config: str
     creation_date: str
@@ -42,17 +28,7 @@ class _VersionModel(_BaseModel):
             config=data["config"],
             creation_date=data["creation_date"],
         )
-        model.is_production = data.get("is_production")  # type: ignore
-        model.is_development = data.get("is_development")  # type: ignore
-        model.is_latest = data.get("is_latest")  # type: ignore
         return model
 
     def to_list(self):
-        return [
-            self.id,
-            self.config,
-            self.creation_date,
-            self.is_production,
-            self.is_development,
-            self.is_latest,
-        ]
+        return [self.id, self.config, self.creation_date]

+ 0 - 84
taipy/core/_version/_version_sql_repository.py

@@ -1,84 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-from sqlalchemy.dialects import sqlite
-
-from .._repository._sql_repository import _SQLRepository
-from ..exceptions.exceptions import ModelNotFound, VersionIsNotProductionVersion
-from ._version_converter import _VersionConverter
-from ._version_model import _VersionModel
-from ._version_repository_interface import _VersionRepositoryInterface
-
-
-class _VersionSQLRepository(_SQLRepository, _VersionRepositoryInterface):
-    def __init__(self) -> None:
-        super().__init__(model_type=_VersionModel, converter=_VersionConverter)
-
-    def _set_latest_version(self, version_number):
-        if old_latest := self.db.execute(str(self.table.select().filter_by(is_latest=True))).fetchone():
-            old_latest = self.model_type.from_dict(old_latest)
-            old_latest.is_latest = False
-            self._update_entry(old_latest)
-
-        version = self.__get_by_id(version_number)
-        version.is_latest = True
-        self._update_entry(version)
-
-    def _get_latest_version(self):
-        if latest := self.db.execute(
-            str(self.table.select().filter_by(is_latest=True).compile(dialect=sqlite.dialect()))
-        ).fetchone():
-            return latest["id"]
-        raise ModelNotFound(self.model_type, "")
-
-    def _set_development_version(self, version_number):
-        if old_development := self.db.execute(str(self.table.select().filter_by(is_development=True))).fetchone():
-            old_development = self.model_type.from_dict(old_development)
-            old_development.is_development = False
-            self._update_entry(old_development)
-
-        version = self.__get_by_id(version_number)
-        version.is_development = True
-        self._update_entry(version)
-
-        self._set_latest_version(version_number)
-
-    def _get_development_version(self):
-        if development := self.db.execute(str(self.table.select().filter_by(is_development=True))).fetchone():
-            return development["id"]
-        raise ModelNotFound(self.model_type, "")
-
-    def _set_production_version(self, version_number):
-        version = self.__get_by_id(version_number)
-        version.is_production = True
-        self._update_entry(version)
-
-        self._set_latest_version(version_number)
-
-    def _get_production_versions(self):
-        if productions := self.db.execute(
-            str(self.table.select().filter_by(is_production=True).compile(dialect=sqlite.dialect())),
-        ).fetchall():
-            return [p["id"] for p in productions]
-        return []
-
-    def _delete_production_version(self, version_number):
-        version = self.__get_by_id(version_number)
-
-        if not version or not version.is_production:
-            raise VersionIsNotProductionVersion(f"Version '{version_number}' is not a production version.")
-        version.is_production = False
-        self._update_entry(version)
-
-    def __get_by_id(self, version_id):
-        query = str(self.table.select().filter_by(id=version_id).compile(dialect=sqlite.dialect()))
-        entry = self.db.execute(query, [version_id]).fetchone()
-        return self.model_type.from_dict(entry) if entry else None

+ 2 - 5
taipy/core/cycle/_cycle_manager_factory.py

@@ -15,18 +15,15 @@ from .._manager._manager_factory import _ManagerFactory
 from ..common._utils import _load_fct
 from ..cycle._cycle_manager import _CycleManager
 from ._cycle_fs_repository import _CycleFSRepository
-from ._cycle_sql_repository import _CycleSQLRepository
 
 
 class _CycleManagerFactory(_ManagerFactory):
-    __REPOSITORY_MAP = {"default": _CycleFSRepository, "sql": _CycleSQLRepository}
+    __REPOSITORY_MAP = {"default": _CycleFSRepository}
 
     @classmethod
     def _build_manager(cls) -> Type[_CycleManager]:
         if cls._using_enterprise():
-            cycle_manager = _load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".cycle._cycle_manager", "_CycleManager"
-            )  # type: ignore
+            cycle_manager = _load_fct(cls._TAIPY_ENTERPRISE_CORE_MODULE + ".cycle._cycle_manager", "_CycleManager")  # type: ignore
             build_repository = _load_fct(
                 cls._TAIPY_ENTERPRISE_CORE_MODULE + ".cycle._cycle_manager_factory", "_CycleManagerFactory"
             )._build_repository  # type: ignore

+ 0 - 15
taipy/core/cycle/_cycle_model.py

@@ -12,29 +12,14 @@
 from dataclasses import dataclass
 from typing import Any, Dict
 
-from sqlalchemy import JSON, Column, Enum, String, Table
-
 from taipy.config.common.frequency import Frequency
 
 from .._repository._base_taipy_model import _BaseModel
-from .._repository.db._sql_base_model import mapper_registry
 from .cycle_id import CycleId
 
 
-@mapper_registry.mapped
 @dataclass
 class _CycleModel(_BaseModel):
-    __table__ = Table(
-        "cycle",
-        mapper_registry.metadata,
-        Column("id", String, primary_key=True),
-        Column("name", String),
-        Column("frequency", Enum(Frequency)),
-        Column("properties", JSON),
-        Column("creation_date", String),
-        Column("start_date", String),
-        Column("end_date", String),
-    )
     id: CycleId
     name: str
     frequency: Frequency

+ 2 - 5
taipy/core/data/_data_manager_factory.py

@@ -15,18 +15,15 @@ from .._manager._manager_factory import _ManagerFactory
 from ..common._utils import _load_fct
 from ._data_fs_repository import _DataFSRepository
 from ._data_manager import _DataManager
-from ._data_sql_repository import _DataSQLRepository
 
 
 class _DataManagerFactory(_ManagerFactory):
-    __REPOSITORY_MAP = {"default": _DataFSRepository, "sql": _DataSQLRepository}
+    __REPOSITORY_MAP = {"default": _DataFSRepository}
 
     @classmethod
     def _build_manager(cls) -> Type[_DataManager]:
         if cls._using_enterprise():
-            data_manager = _load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".data._data_manager", "_DataManager"
-            )  # type: ignore
+            data_manager = _load_fct(cls._TAIPY_ENTERPRISE_CORE_MODULE + ".data._data_manager", "_DataManager")  # type: ignore
             build_repository = _load_fct(
                 cls._TAIPY_ENTERPRISE_CORE_MODULE + ".data._data_manager_factory", "_DataManagerFactory"
             )._build_repository  # type: ignore

+ 0 - 25
taipy/core/data/_data_model.py

@@ -12,39 +12,14 @@
 from dataclasses import dataclass
 from typing import Any, Dict, List, Optional
 
-from sqlalchemy import JSON, Boolean, Column, Enum, Float, String, Table, UniqueConstraint
-
 from taipy.config.common.scope import Scope
 
 from .._repository._base_taipy_model import _BaseModel
-from .._repository.db._sql_base_model import mapper_registry
 from .data_node_id import Edit
 
 
-@mapper_registry.mapped
 @dataclass
 class _DataNodeModel(_BaseModel):
-    __table__ = Table(
-        "data_node",
-        mapper_registry.metadata,
-        Column("id", String, primary_key=True),
-        Column("config_id", String),
-        Column("scope", Enum(Scope)),
-        Column("storage_type", String),
-        Column("owner_id", String),
-        Column("parent_ids", JSON),
-        Column("last_edit_date", String),
-        Column("edits", JSON),
-        Column("version", String),
-        Column("validity_days", Float),
-        Column("validity_seconds", Float),
-        Column("edit_in_progress", Boolean),
-        Column("editor_id", String),
-        Column("editor_expiration_date", String),
-        Column("data_node_properties", JSON),
-    )
-    __table_args__ = (UniqueConstraint("config_id", "owner_id", name="_config_owner_uc"),)
-
     id: str
     config_id: str
     scope: Scope

+ 2 - 5
taipy/core/job/_job_manager_factory.py

@@ -15,18 +15,15 @@ from .._manager._manager_factory import _ManagerFactory
 from ..common._utils import _load_fct
 from ._job_fs_repository import _JobFSRepository
 from ._job_manager import _JobManager
-from ._job_sql_repository import _JobSQLRepository
 
 
 class _JobManagerFactory(_ManagerFactory):
-    __REPOSITORY_MAP = {"default": _JobFSRepository, "sql": _JobSQLRepository}
+    __REPOSITORY_MAP = {"default": _JobFSRepository}
 
     @classmethod
     def _build_manager(cls) -> Type[_JobManager]:
         if cls._using_enterprise():
-            job_manager = _load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".job._job_manager", "_JobManager"
-            )  # type: ignore
+            job_manager = _load_fct(cls._TAIPY_ENTERPRISE_CORE_MODULE + ".job._job_manager", "_JobManager")  # type: ignore
             build_repository = _load_fct(
                 cls._TAIPY_ENTERPRISE_CORE_MODULE + ".job._job_manager_factory", "_JobManagerFactory"
             )._build_repository  # type: ignore

+ 0 - 18
taipy/core/job/_job_model.py

@@ -12,31 +12,13 @@
 from dataclasses import dataclass
 from typing import Any, Dict, List
 
-from sqlalchemy import JSON, Boolean, Column, Enum, String, Table
-
 from .._repository._base_taipy_model import _BaseModel
-from .._repository.db._sql_base_model import mapper_registry
 from .job_id import JobId
 from .status import Status
 
 
-@mapper_registry.mapped
 @dataclass
 class _JobModel(_BaseModel):
-    __table__ = Table(
-        "job",
-        mapper_registry.metadata,
-        Column("id", String, primary_key=True),
-        Column("task_id", String),
-        Column("status", Enum(Status)),
-        Column("force", Boolean),
-        Column("submit_id", String),
-        Column("submit_entity_id", String),
-        Column("creation_date", String),
-        Column("subscribers", JSON),
-        Column("stacktrace", JSON),
-        Column("version", String),
-    )
     id: JobId
     task_id: str
     status: Status

+ 0 - 18
taipy/core/job/_job_sql_repository.py

@@ -1,18 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-from .._repository._sql_repository import _SQLRepository
-from ._job_converter import _JobConverter
-from ._job_model import _JobModel
-
-
-class _JobSQLRepository(_SQLRepository):
-    def __init__(self) -> None:
-        super().__init__(model_type=_JobModel, converter=_JobConverter)

+ 4 - 5
taipy/core/scenario/_scenario_manager.py

@@ -312,11 +312,10 @@ class _ScenarioManager(_Manager[Scenario], _VersionMixin):
     ) -> List[Scenario]:
         """
         Filter a list of scenarios by a given creation time period.
-        The time period is inclusive.
 
         Parameters:
-            created_start_time (Optional[datetime]): Start time of the period.
-            created_end_time (Optional[datetime]): End time of the period.
+            created_start_time (Optional[datetime]): Start time of the period. The start time is inclusive.
+            created_end_time (Optional[datetime]): End time of the period. The end time is exclusive.
 
         Returns:
             List[Scenario]: List of scenarios created in the given time period.
@@ -325,12 +324,12 @@ class _ScenarioManager(_Manager[Scenario], _VersionMixin):
             return scenarios
 
         if not created_start_time:
-            return [scenario for scenario in scenarios if scenario.creation_date <= created_end_time]
+            return [scenario for scenario in scenarios if scenario.creation_date < created_end_time]
 
         if not created_end_time:
             return [scenario for scenario in scenarios if created_start_time <= scenario.creation_date]
 
-        return [scenario for scenario in scenarios if created_start_time <= scenario.creation_date <= created_end_time]
+        return [scenario for scenario in scenarios if created_start_time <= scenario.creation_date < created_end_time]
 
     @classmethod
     def _is_promotable_to_primary(cls, scenario: Union[Scenario, ScenarioId]) -> bool:

+ 1 - 2
taipy/core/scenario/_scenario_manager_factory.py

@@ -15,11 +15,10 @@ from .._manager._manager_factory import _ManagerFactory
 from ..common._utils import _load_fct
 from ._scenario_fs_repository import _ScenarioFSRepository
 from ._scenario_manager import _ScenarioManager
-from ._scenario_sql_repository import _ScenarioSQLRepository
 
 
 class _ScenarioManagerFactory(_ManagerFactory):
-    __REPOSITORY_MAP = {"default": _ScenarioFSRepository, "sql": _ScenarioSQLRepository}
+    __REPOSITORY_MAP = {"default": _ScenarioFSRepository}
 
     @classmethod
     def _build_manager(cls) -> Type[_ScenarioManager]:

+ 0 - 20
taipy/core/scenario/_scenario_model.py

@@ -12,35 +12,15 @@
 from dataclasses import dataclass
 from typing import Any, Dict, List, Optional
 
-from sqlalchemy import JSON, Boolean, Column, String, Table
-
 from .._repository._base_taipy_model import _BaseModel
-from .._repository.db._sql_base_model import mapper_registry
 from ..cycle.cycle_id import CycleId
 from ..data.data_node_id import DataNodeId
 from ..task.task_id import TaskId
 from .scenario_id import ScenarioId
 
 
-@mapper_registry.mapped
 @dataclass
 class _ScenarioModel(_BaseModel):
-    __table__ = Table(
-        "scenario",
-        mapper_registry.metadata,
-        Column("id", String, primary_key=True),
-        Column("config_id", String),
-        Column("tasks", JSON),
-        Column("additional_data_nodes", JSON),
-        Column("properties", JSON),
-        Column("creation_date", String),
-        Column("primary_scenario", Boolean),
-        Column("subscribers", JSON),
-        Column("tags", JSON),
-        Column("version", String),
-        Column("sequences", JSON),
-        Column("cycle", String),
-    )
     id: ScenarioId
     config_id: str
     tasks: List[TaskId]

+ 0 - 18
taipy/core/scenario/_scenario_sql_repository.py

@@ -1,18 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-from .._repository._sql_repository import _SQLRepository
-from ._scenario_converter import _ScenarioConverter
-from ._scenario_model import _ScenarioModel
-
-
-class _ScenarioSQLRepository(_SQLRepository):
-    def __init__(self) -> None:
-        super().__init__(model_type=_ScenarioModel, converter=_ScenarioConverter)

+ 1 - 2
taipy/core/submission/_submission_manager_factory.py

@@ -15,11 +15,10 @@ from .._manager._manager_factory import _ManagerFactory
 from ..common._utils import _load_fct
 from ._submission_fs_repository import _SubmissionFSRepository
 from ._submission_manager import _SubmissionManager
-from ._submission_sql_repository import _SubmissionSQLRepository
 
 
 class _SubmissionManagerFactory(_ManagerFactory):
-    __REPOSITORY_MAP = {"default": _SubmissionFSRepository, "sql": _SubmissionSQLRepository}
+    __REPOSITORY_MAP = {"default": _SubmissionFSRepository}
 
     @classmethod
     def _build_manager(cls) -> Type[_SubmissionManager]:

+ 0 - 23
taipy/core/submission/_submission_model.py

@@ -12,36 +12,13 @@
 from dataclasses import dataclass
 from typing import Any, Dict, List, Optional, Union
 
-from sqlalchemy import JSON, Boolean, Column, Enum, String, Table
-
 from .._repository._base_taipy_model import _BaseModel
-from .._repository.db._sql_base_model import mapper_registry
 from ..job.job_id import JobId
 from .submission_status import SubmissionStatus
 
 
-@mapper_registry.mapped
 @dataclass
 class _SubmissionModel(_BaseModel):
-    __table__ = Table(
-        "submission",
-        mapper_registry.metadata,
-        Column("id", String, primary_key=True),
-        Column("entity_id", String),
-        Column("entity_type", String),
-        Column("entity_config_id", String),
-        Column("job_ids", JSON),
-        Column("properties", JSON),
-        Column("creation_date", String),
-        Column("submission_status", Enum(SubmissionStatus)),
-        Column("version", String),
-        Column("is_completed", Boolean),
-        Column("is_abandoned", Boolean),
-        Column("is_canceled", Boolean),
-        Column("running_jobs", JSON),
-        Column("blocked_jobs", JSON),
-        Column("pending_jobs", JSON),
-    )
     id: str
     entity_id: str
     entity_type: str

+ 0 - 18
taipy/core/submission/_submission_sql_repository.py

@@ -1,18 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-from .._repository._sql_repository import _SQLRepository
-from ._submission_converter import _SubmissionConverter
-from ._submission_model import _SubmissionModel
-
-
-class _SubmissionSQLRepository(_SQLRepository):
-    def __init__(self) -> None:
-        super().__init__(model_type=_SubmissionModel, converter=_SubmissionConverter)

+ 2 - 2
taipy/core/taipy.py

@@ -528,7 +528,7 @@ def get_scenarios(
         descending (bool): If True, sort the output list of scenarios in descending order.
             The default value is False.
         created_start_time (Optional[datetime]): The optional inclusive start date to filter scenarios by creation date.
-        created_end_time (Optional[datetime]): The optional inclusive end date to filter scenarios by creation date.
+        created_end_time (Optional[datetime]): The optional exclusive end date to filter scenarios by creation date.
         sort_key (Literal["name", "id", "creation_date", "tags"]): The optional sort_key to
             decide upon what key scenarios are sorted. The sorting is in increasing order for
             dates, in alphabetical order for name and id, and in lexicographical order for tags.
@@ -586,7 +586,7 @@ def get_primary_scenarios(
         descending (bool): If True, sort the output list of scenarios in descending order.
             The default value is False.
         created_start_time (Optional[datetime]): The optional inclusive start date to filter scenarios by creation date.
-        created_end_time (Optional[datetime]): The optional inclusive end date to filter scenarios by creation date.
+        created_end_time (Optional[datetime]): The optional exclusive end date to filter scenarios by creation date.
         sort_key (Literal["name", "id", "creation_date", "tags"]): The optional sort_key to
             decide upon what key scenarios are sorted. The sorting is in increasing order for
             dates, in alphabetical order for name and id, and in lexicographical order for tags.

+ 2 - 5
taipy/core/task/_task_manager_factory.py

@@ -15,18 +15,15 @@ from .._manager._manager_factory import _ManagerFactory
 from ..common._utils import _load_fct
 from ._task_fs_repository import _TaskFSRepository
 from ._task_manager import _TaskManager
-from ._task_sql_repository import _TaskSQLRepository
 
 
 class _TaskManagerFactory(_ManagerFactory):
-    __REPOSITORY_MAP = {"default": _TaskFSRepository, "sql": _TaskSQLRepository}
+    __REPOSITORY_MAP = {"default": _TaskFSRepository}
 
     @classmethod
     def _build_manager(cls) -> Type[_TaskManager]:
         if cls._using_enterprise():
-            task_manager = _load_fct(
-                cls._TAIPY_ENTERPRISE_CORE_MODULE + ".task._task_manager", "_TaskManager"
-            )  # type: ignore
+            task_manager = _load_fct(cls._TAIPY_ENTERPRISE_CORE_MODULE + ".task._task_manager", "_TaskManager")  # type: ignore
             build_repository = _load_fct(
                 cls._TAIPY_ENTERPRISE_CORE_MODULE + ".task._task_manager_factory", "_TaskManagerFactory"
             )._build_repository  # type: ignore

+ 0 - 19
taipy/core/task/_task_model.py

@@ -12,30 +12,11 @@
 from dataclasses import dataclass
 from typing import Any, Dict, List, Optional
 
-from sqlalchemy import JSON, Boolean, Column, String, Table
-
 from .._repository._base_taipy_model import _BaseModel
-from .._repository.db._sql_base_model import mapper_registry
 
 
-@mapper_registry.mapped
 @dataclass
 class _TaskModel(_BaseModel):
-    __table__ = Table(
-        "task",
-        mapper_registry.metadata,
-        Column("id", String, primary_key=True),
-        Column("owner_id", String),
-        Column("parent_ids", JSON),
-        Column("config_id", String),
-        Column("input_ids", JSON),
-        Column("function_name", String),
-        Column("function_module", String),
-        Column("output_ids", JSON),
-        Column("version", String),
-        Column("skippable", Boolean),
-        Column("properties", JSON),
-    )
     id: str
     owner_id: Optional[str]
     parent_ids: List[str]

+ 0 - 18
taipy/core/task/_task_sql_repository.py

@@ -1,18 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-from .._repository._sql_repository import _SQLRepository
-from ._task_converter import _TaskConverter
-from ._task_model import _TaskModel
-
-
-class _TaskSQLRepository(_SQLRepository):
-    def __init__(self) -> None:
-        super().__init__(model_type=_TaskModel, converter=_TaskConverter)

+ 2 - 0
taipy/gui/.gitignore

@@ -56,3 +56,5 @@ gui/packaging/taipy-gui.gen.d.ts
 
 # GUI Build
 webapp
+
+!data

+ 2 - 2
taipy/gui/__init__.py

@@ -46,8 +46,8 @@ application.
     add functionality to Taipy GUI:
 
     - [`python-magic`](https://pypi.org/project/python-magic/): identifies image format
-      from byte buffers so the [`image`](../../../userman/gui/viselements/standard-and-blocks/image.md) control can
-      display them, and so that [`file_download`](../../../userman/gui/viselements/standard-and-blocks/file_download.md)
+      from byte buffers so the [`image`](../../../userman/gui/viselements/generic/image.md) control can
+      display them, and so that [`file_download`](../../../userman/gui/viselements/generic/file_download.md)
       can request the browser to display the image content when relevant.<br/>
       You can install that package with the regular `pip install python-magic` command
       (then potentially `pip install python-magic` on Windows),

+ 22 - 11
taipy/gui/_renderers/builder.py

@@ -377,11 +377,13 @@ class _Builder:
     def __set_react_attribute(self, name: str, value: t.Any):
         return self.set_attribute(name, "{!" + (str(value).lower() if isinstance(value, bool) else str(value)) + "!}")
 
-    def _get_lov_adapter(self, var_name: str, property_name: t.Optional[str] = None, multi_selection=True):  # noqa: C901
+    def _get_lov_adapter(  # noqa: C901
+        self, var_name: str, property_name: t.Optional[str] = None, multi_selection=True, with_default=True
+    ):
         property_name = var_name if property_name is None else property_name
         lov_name = self.__hashes.get(var_name)
         lov = self.__get_list_of_(var_name)
-        default_lov = []
+        default_lov: t.Optional[t.List[t.Any]] = [] if with_default or not lov_name else None
 
         adapter = self.__attributes.get("adapter")
         if adapter and isinstance(adapter, str):
@@ -396,15 +398,15 @@ class _Builder:
         if isinstance(lov, list):
             if not isinstance(var_type, str):
                 elt = None
-                if len(lov) == 0:
+                if lov:
+                    elt = lov[0]
+                else:
                     value = self.__attributes.get("value")
                     if isinstance(value, list):
                         if len(value) > 0:
                             elt = value[0]
                     else:
                         elt = value
-                else:
-                    elt = lov[0]
                 var_type = self.__gui._get_unique_type_adapter(type(elt).__name__)
             if adapter is None:
                 adapter = self.__gui._get_adapter_for_type(var_type)
@@ -427,7 +429,7 @@ class _Builder:
             if adapter is not None:
                 self.__gui._add_adapter_for_type(var_type, adapter)  # type: ignore
 
-            if len(lov) > 0:
+            if default_lov is not None and lov:
                 for elt in lov:
                     ret = self.__gui._run_adapter(
                         t.cast(t.Callable, adapter), elt, adapter.__name__ if callable(adapter) else "adapter"
@@ -453,7 +455,8 @@ class _Builder:
                 self.__set_default_value("value", ret_val)
 
         # LoV default value
-        self.__set_json_attribute(_to_camel_case(f"default_{property_name}"), default_lov)
+        if default_lov is not None:
+            self.__set_json_attribute(_to_camel_case(f"default_{property_name}"), default_lov)
 
         # LoV expression binding
         if lov_name:
@@ -531,7 +534,7 @@ class _Builder:
                     + "}"
                 )
                 self.__update_vars.append(f"comparedatas={','.join(cmp_datas_hash)}")
-        col_types = self.__gui._accessors._get_col_types(data_hash, _TaipyData(data, data_hash))
+        col_types = self.__gui._get_accessor().get_col_types(data_hash, _TaipyData(data, data_hash))
         col_dict = _get_columns_dict(
             data, self.__attributes.get("columns", {}), col_types, date_format, self.__attributes.get("number_format")
         )
@@ -591,7 +594,7 @@ class _Builder:
         # read column definitions
         data = self.__attributes.get("data")
         data_hash = self.__hashes.get("data", "")
-        col_types = self.__gui._accessors._get_col_types(data_hash, _TaipyData(data, data_hash))
+        col_types = self.__gui._get_accessor().get_col_types(data_hash, _TaipyData(data, data_hash))
 
         config = _build_chart_config(self.__gui, self.__attributes, col_types)
 
@@ -1031,8 +1034,16 @@ class _Builder:
                 self.__set_dynamic_date_attribute(attr[0], _get_tuple_val(attr, 2, None))
             elif var_type == PropertyType.data:
                 self.__set_dynamic_property_without_default(attr[0], var_type)
-            elif var_type == PropertyType.lov or var_type == PropertyType.single_lov:
-                self._get_lov_adapter(attr[0], multi_selection=var_type != PropertyType.single_lov)
+            elif (
+                var_type == PropertyType.lov
+                or var_type == PropertyType.single_lov
+                or var_type == PropertyType.lov_no_default
+            ):
+                self._get_lov_adapter(
+                    attr[0],
+                    multi_selection=var_type != PropertyType.single_lov,
+                    with_default=var_type != PropertyType.lov_no_default,
+                )
             elif var_type == PropertyType.lov_value:
                 self.__set_dynamic_property_without_default(
                     attr[0], var_type, _get_tuple_val(attr, 2, None) == "optional"

+ 1 - 1
taipy/gui/builder/_utils.py

@@ -51,7 +51,7 @@ class _LambdaByName(ast.NodeVisitor):
 
     def __init__(self, element_name: str, lineno: int, lambdas: t.Dict[str, ast.Lambda]) -> None:
         super().__init__()
-        self.element_name = element_name
+        self.element_name = element_name.split(".")[-1]
         self.lambdas = lambdas
         self.lineno = lineno + 1
 

+ 20 - 13
taipy/gui/data/array_dict_data_accessor.py

@@ -13,7 +13,6 @@ import typing as t
 
 import pandas as pd
 
-from ..gui import Gui
 from ..utils import _MapDict
 from .data_format import _DataFormat
 from .pandas_data_accessor import _PandasDataAccessor
@@ -23,11 +22,11 @@ class _ArrayDictDataAccessor(_PandasDataAccessor):
     __types = (dict, list, tuple, _MapDict)
 
     @staticmethod
-    def get_supported_classes() -> t.List[str]:
-        return [t.__name__ for t in _ArrayDictDataAccessor.__types]  # type: ignore
+    def get_supported_classes() -> t.List[t.Type]:
+        return list(_ArrayDictDataAccessor.__types)
 
-    def _get_dataframe(self, value: t.Any) -> t.Union[t.List[pd.DataFrame], pd.DataFrame]:
-        if isinstance(value, list):
+    def to_pandas(self, value: t.Any) -> t.Union[t.List[pd.DataFrame], pd.DataFrame]:
+        if isinstance(value, (list, tuple)):
             if not value or isinstance(value[0], (str, int, float, bool)):
                 return pd.DataFrame({"0": value})
             types = {type(x) for x in value}
@@ -45,7 +44,7 @@ class _ArrayDictDataAccessor(_PandasDataAccessor):
                 elif type_elt is _MapDict:
                     return [pd.DataFrame(v._dict) for v in value]
                 elif type_elt is pd.DataFrame:
-                    return value
+                    return t.cast(t.List[pd.DataFrame], value)
 
             elif len(types) == 2 and list in types and pd.DataFrame in types:
                 return [v if isinstance(v, pd.DataFrame) else pd.DataFrame({f"{i}/0": v}) for i, v in enumerate(value)]
@@ -53,14 +52,22 @@ class _ArrayDictDataAccessor(_PandasDataAccessor):
             return pd.DataFrame(value._dict)
         return pd.DataFrame(value)
 
+    def _from_pandas(self, value: pd.DataFrame, type: t.Type):
+        if type is dict:
+            return value.to_dict("list")
+        if type is _MapDict:
+            return _MapDict(value.to_dict("list"))
+        if len(value.columns) == 1:
+            if type is list:
+                return value.iloc[:, 0].to_list()
+            if type is tuple:
+                return tuple(value.iloc[:, 0].to_list())
+        return super()._from_pandas(value, type)
+
     def get_col_types(self, var_name: str, value: t.Any) -> t.Union[None, t.Dict[str, str]]:  # type: ignore
-        if isinstance(value, _ArrayDictDataAccessor.__types):  # type: ignore
-            return super().get_col_types(var_name, self._get_dataframe(value))
-        return None
+        return super().get_col_types(var_name, self.to_pandas(value))
 
     def get_data(  # noqa: C901
-        self, guiApp: Gui, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
+        self, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
     ) -> t.Dict[str, t.Any]:
-        if isinstance(value, _ArrayDictDataAccessor.__types):  # type: ignore
-            return super().get_data(guiApp, var_name, self._get_dataframe(value), payload, data_format)
-        return {}
+        return super().get_data(var_name, self.to_pandas(value), payload, data_format)

+ 2 - 2
taipy/gui/data/comparison.py

@@ -27,12 +27,12 @@ def _compare_function(
             return None
         compare_fn = gui._get_user_function(compare_name) if compare_name else None
         if callable(compare_fn):
-            return gui._accessors._get_dataframe(
+            return gui._get_accessor().to_pandas(
                 gui._call_function_with_state(compare_fn, [name, [gui._get_real_var_name(n) for n in names]])
             )
         elif compare_fn is not None:
             _warn(f"{compare_name}(): compare function name is not valid.")
-        dfs = [gui._accessors._get_dataframe(_getscopeattr(gui, n)) for n in names]
+        dfs = [gui._get_accessor().to_pandas(_getscopeattr(gui, n)) for n in names]
         return value.compare(dfs[0], keep_shape=True)
     except Exception as e:
         if not gui._call_on_exception(compare_name or "Gui._compare_function", e):

+ 58 - 41
taipy/gui/data/data_accessor.py

@@ -17,18 +17,24 @@ from .._warnings import _warn
 from ..utils import _TaipyData
 from .data_format import _DataFormat
 
+if t.TYPE_CHECKING:
+    from ..gui import Gui
+
 
 class _DataAccessor(ABC):
     _WS_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
 
+    def __init__(self, gui: "Gui") -> None:
+        self._gui = gui
+
     @staticmethod
     @abstractmethod
-    def get_supported_classes() -> t.List[str]:
+    def get_supported_classes() -> t.List[t.Type]:
         pass
 
     @abstractmethod
     def get_data(
-        self, guiApp: t.Any, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
+        self, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
     ) -> t.Dict[str, t.Any]:
         pass
 
@@ -37,54 +43,61 @@ class _DataAccessor(ABC):
         pass
 
     @abstractmethod
-    def _get_dataframe(self, value: t.Any) -> t.Union[t.List[t.Any], t.Any]:
+    def to_pandas(self, value: t.Any) -> t.Union[t.List[t.Any], t.Any]:
         pass
 
     @abstractmethod
-    def _on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
+    def on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
         pass
 
     @abstractmethod
-    def _on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
+    def on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
         pass
 
     @abstractmethod
-    def _on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
+    def on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
         pass
 
+    @abstractmethod
+    def to_csv(self, var_name: str, value: t.Any):
+        pass
+
+
 class _InvalidDataAccessor(_DataAccessor):
     @staticmethod
-    def get_supported_classes() -> t.List[str]:
-        return [type(None).__name__]
+    def get_supported_classes() -> t.List[t.Type]:
+        return []
 
     def get_data(
-        self, guiApp: t.Any, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
+        self, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
     ) -> t.Dict[str, t.Any]:
         return {}
 
     def get_col_types(self, var_name: str, value: t.Any) -> t.Dict[str, str]:
         return {}
 
-    def _get_dataframe(self, value: t.Any) -> t.Union[t.List[t.Any], t.Any]:
+    def to_pandas(self, value: t.Any) -> t.Union[t.List[t.Any], t.Any]:
         return None
 
-    def _on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
+    def on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
         return None
 
-    def _on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
+    def on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
         return None
 
-    def _on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
+    def on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
         return None
 
+    def to_csv(self, var_name: str, value: t.Any):
+        return None
 
-class _DataAccessors(object):
-    def __init__(self) -> None:
-        self.__access_4_type: t.Dict[str, _DataAccessor] = {}
-
-        self.__invalid_data_accessor = _InvalidDataAccessor()
 
+class _DataAccessors(object):
+    def __init__(self, gui: "Gui") -> None:
+        self.__access_4_type: t.Dict[t.Type, _DataAccessor] = {}
+        self.__invalid_data_accessor = _InvalidDataAccessor(gui)
         self.__data_format = _DataFormat.JSON
+        self.__gui = gui
 
         from .array_dict_data_accessor import _ArrayDictDataAccessor
         from .numpy_data_accessor import _NumpyDataAccessor
@@ -99,52 +112,56 @@ class _DataAccessors(object):
             raise AttributeError("The argument of 'DataAccessors.register' should be a class")
         if not issubclass(cls, _DataAccessor):
             raise TypeError(f"Class {cls.__name__} is not a subclass of DataAccessor")
-        names = cls.get_supported_classes()
-        if not names:
+        classes = cls.get_supported_classes()
+        if not classes:
             raise TypeError(f"method {cls.__name__}.get_supported_classes returned an invalid value")
         # check existence
         inst: t.Optional[_DataAccessor] = None
-        for name in names:
-            inst = self.__access_4_type.get(name)
+        for cl in classes:
+            inst = self.__access_4_type.get(cl)
             if inst:
                 break
         if inst is None:
             try:
-                inst = cls()
+                inst = cls(self.__gui)
             except Exception as e:
                 raise TypeError(f"Class {cls.__name__} cannot be instantiated") from e
             if inst:
-                for name in names:
-                    self.__access_4_type[name] = inst  # type: ignore
+                for cl in classes:
+                    self.__access_4_type[cl] = inst  # type: ignore
 
     def __get_instance(self, value: _TaipyData) -> _DataAccessor:  # type: ignore
         value = value.get() if isinstance(value, _TaipyData) else value
-        access = self.__access_4_type.get(type(value).__name__)
+        access = self.__access_4_type.get(type(value))
         if access is None:
             if value is not None:
-                _warn(f"Can't find Data Accessor for type {type(value).__name__}.")
+                _warn(f"Can't find Data Accessor for type {str(type(value))}.")
             return self.__invalid_data_accessor
         return access
 
-    def _get_data(
-        self, guiApp: t.Any, var_name: str, value: _TaipyData, payload: t.Dict[str, t.Any]
-    ) -> t.Dict[str, t.Any]:
-        return self.__get_instance(value).get_data(guiApp, var_name, value.get(), payload, self.__data_format)
+    def get_data(self, var_name: str, value: _TaipyData, payload: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
+        return self.__get_instance(value).get_data(var_name, value.get(), payload, self.__data_format)
 
-    def _get_col_types(self, var_name: str, value: _TaipyData) -> t.Dict[str, str]:
+    def get_col_types(self, var_name: str, value: _TaipyData) -> t.Dict[str, str]:
         return self.__get_instance(value).get_col_types(var_name, value.get())
 
-    def _set_data_format(self, data_format: _DataFormat):
+    def set_data_format(self, data_format: _DataFormat):
         self.__data_format = data_format
 
-    def _get_dataframe(self, value: t.Any):
-        return self.__get_instance(value)._get_dataframe(value)
+    def get_dataframe(self, value: t.Any):
+        return self.__get_instance(value).to_pandas(value)
+
+    def on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
+        return self.__get_instance(value).on_edit(value, payload)
+
+    def on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
+        return self.__get_instance(value).on_delete(value, payload)
 
-    def _on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
-        return self.__get_instance(value)._on_edit(value, payload)
+    def on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
+        return self.__get_instance(value).on_add(value, payload, new_row)
 
-    def _on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
-        return self.__get_instance(value)._on_delete(value, payload)
+    def to_csv(self, var_name: str, value: t.Any):
+        return self.__get_instance(value).to_csv(var_name, value.get())
 
-    def _on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
-        return self.__get_instance(value)._on_add(value, payload, new_row)
+    def to_pandas(self, value: t.Any):
+        return self.__get_instance(value).to_pandas(value.get())

+ 1 - 0
taipy/gui/data/data_format.py

@@ -15,3 +15,4 @@ from enum import Enum
 class _DataFormat(Enum):
     JSON = "JSON"
     APACHE_ARROW = "ARROW"
+    CSV = "CSV"

+ 7 - 16
taipy/gui/data/numpy_data_accessor.py

@@ -14,8 +14,6 @@ import typing as t
 import numpy
 import pandas as pd
 
-from ..gui import Gui
-from .data_format import _DataFormat
 from .pandas_data_accessor import _PandasDataAccessor
 
 
@@ -23,20 +21,13 @@ class _NumpyDataAccessor(_PandasDataAccessor):
     __types = (numpy.ndarray,)
 
     @staticmethod
-    def get_supported_classes() -> t.List[str]:
-        return [t.__name__ for t in _NumpyDataAccessor.__types]  # type: ignore
+    def get_supported_classes() -> t.List[t.Type]:
+        return list(_NumpyDataAccessor.__types)
 
-    def _get_dataframe(self, value: t.Any) -> pd.DataFrame:
+    def to_pandas(self, value: t.Any) -> pd.DataFrame:
         return pd.DataFrame(value)
 
-    def get_col_types(self, var_name: str, value: t.Any) -> t.Union[None, t.Dict[str, str]]:  # type: ignore
-        if isinstance(value, _NumpyDataAccessor.__types):  # type: ignore
-            return super().get_col_types(var_name, self._get_dataframe(value))
-        return None
-
-    def get_data(  # noqa: C901
-        self, guiApp: Gui, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
-    ) -> t.Dict[str, t.Any]:
-        if isinstance(value, _NumpyDataAccessor.__types):  # type: ignore
-            return super().get_data(guiApp, var_name, self._get_dataframe(value), payload, data_format)
-        return {}
+    def _from_pandas(self, value: pd.DataFrame, type: t.Type):
+        if type is numpy.ndarray:
+            return value.to_numpy()
+        return super()._from_pandas(value, type)

+ 96 - 68
taipy/gui/data/pandas_data_accessor.py

@@ -9,9 +9,11 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
+import os
 import typing as t
 from datetime import datetime
 from importlib import util
+from tempfile import mkstemp
 
 import numpy as np
 import pandas as pd
@@ -39,12 +41,22 @@ class _PandasDataAccessor(_DataAccessor):
 
     __AGGREGATE_FUNCTIONS: t.List[str] = ["count", "sum", "mean", "median", "min", "max", "std", "first", "last"]
 
-    def _get_dataframe(self, value: t.Any) -> t.Any:
+    def to_pandas(self, value: t.Union[pd.DataFrame, pd.Series]) -> t.Union[t.List[pd.DataFrame], pd.DataFrame]:
+        return self.__to_dataframe(value)
+
+    def __to_dataframe(self, value: t.Union[pd.DataFrame, pd.Series]) -> pd.DataFrame:
+        if isinstance(value, pd.Series):
+            return pd.DataFrame(value)
+        return t.cast(pd.DataFrame, value)
+
+    def _from_pandas(self, value: pd.DataFrame, data_type: t.Type):
+        if data_type is pd.Series:
+            return value.iloc[:, 0]
         return value
 
     @staticmethod
-    def get_supported_classes() -> t.List[str]:
-        return [t.__name__ for t in _PandasDataAccessor.__types]  # type: ignore
+    def get_supported_classes() -> t.List[t.Type]:
+        return list(_PandasDataAccessor.__types)
 
     @staticmethod
     def __user_function(
@@ -68,7 +80,6 @@ class _PandasDataAccessor(_DataAccessor):
 
     def __build_transferred_cols(
         self,
-        gui: Gui,
         payload_cols: t.Any,
         dataframe: pd.DataFrame,
         styles: t.Optional[t.Dict[str, str]] = None,
@@ -89,9 +100,9 @@ class _PandasDataAccessor(_DataAccessor):
                 is_copied = True
             for k, v in styles.items():
                 col_applied = False
-                func = gui._get_user_function(v)
+                func = self._gui._get_user_function(v)
                 if callable(func):
-                    col_applied = self.__apply_user_function(gui, func, k if k in cols else None, v, dataframe, "tps__")
+                    col_applied = self.__apply_user_function(func, k if k in cols else None, v, dataframe, "tps__")
                 if not col_applied:
                     dataframe[v] = v
                 cols.append(col_applied or v)
@@ -102,9 +113,9 @@ class _PandasDataAccessor(_DataAccessor):
                 is_copied = True
             for k, v in tooltips.items():
                 col_applied = False
-                func = gui._get_user_function(v)
+                func = self._gui._get_user_function(v)
                 if callable(func):
-                    col_applied = self.__apply_user_function(gui, func, k if k in cols else None, v, dataframe, "tpt__")
+                    col_applied = self.__apply_user_function(func, k if k in cols else None, v, dataframe, "tpt__")
                 cols.append(col_applied or v)
         # deal with dates
         datecols = col_types[col_types.astype(str).str.startswith("datetime")].index.tolist()  # type: ignore
@@ -144,7 +155,6 @@ class _PandasDataAccessor(_DataAccessor):
 
     def __apply_user_function(
         self,
-        gui: Gui,
         user_function: t.Callable,
         column_name: t.Optional[str],
         function_name: str,
@@ -156,7 +166,7 @@ class _PandasDataAccessor(_DataAccessor):
             data[new_col_name] = data.apply(
                 _PandasDataAccessor.__user_function,
                 axis=1,
-                args=(gui, column_name, user_function, function_name),
+                args=(self._gui, column_name, user_function, function_name),
             )
             return new_col_name
         except Exception as e:
@@ -185,7 +195,7 @@ class _PandasDataAccessor(_DataAccessor):
             ret["start"] = start
         if data_extraction is not None:
             ret["dataExtraction"] = data_extraction  # Extract data out of dictionary on front-end
-        if data_format == _DataFormat.APACHE_ARROW:
+        if data_format is _DataFormat.APACHE_ARROW:
             if not _has_arrow_module:
                 raise RuntimeError("Cannot use Arrow as pyarrow package is not installed")
             # Convert from pandas to Arrow
@@ -208,22 +218,19 @@ class _PandasDataAccessor(_DataAccessor):
         return ret
 
     def get_col_types(self, var_name: str, value: t.Any) -> t.Union[None, t.Dict[str, str]]:  # type: ignore
-        if isinstance(value, pd.Series):
-            value = value.to_frame()
-        if isinstance(value, pd.DataFrame):  # type: ignore
-            return {str(k): v for k, v in value.dtypes.apply(lambda x: x.name.lower()).items()}
-        elif isinstance(value, list):
+        if isinstance(value, list):
             ret_dict: t.Dict[str, str] = {}
             for i, v in enumerate(value):
-                ret_dict.update({f"{i}/{k}": v for k, v in v.dtypes.apply(lambda x: x.name.lower()).items()})
+                ret_dict.update(
+                    {f"{i}/{k}": v for k, v in self.__to_dataframe(v).dtypes.apply(lambda x: x.name.lower()).items()}
+                )
             return ret_dict
-        return None
+        return {str(k): v for k, v in self.__to_dataframe(value).dtypes.apply(lambda x: x.name.lower()).items()}
 
     def __get_data(  # noqa: C901
         self,
-        gui: Gui,
         var_name: str,
-        value: t.Union[pd.DataFrame, pd.Series],
+        df: pd.DataFrame,
         payload: t.Dict[str, t.Any],
         data_format: _DataFormat,
         col_prefix: t.Optional[str] = "",
@@ -235,19 +242,17 @@ class _PandasDataAccessor(_DataAccessor):
         paged = not payload.get("alldata", False)
         is_copied = False
 
-        if isinstance(value, pd.Series):
-            value = value.to_frame()
-        orig_df = value
+        orig_df = df
         # add index if not chart
         if paged:
-            if _PandasDataAccessor.__INDEX_COL not in value.columns:
-                value = value.copy()
+            if _PandasDataAccessor.__INDEX_COL not in df.columns:
+                df = df.copy()
                 is_copied = True
-                value[_PandasDataAccessor.__INDEX_COL] = value.index
+                df[_PandasDataAccessor.__INDEX_COL] = df.index
             if columns and _PandasDataAccessor.__INDEX_COL not in columns:
                 columns.append(_PandasDataAccessor.__INDEX_COL)
 
-        fullrowcount = len(value)
+        fullrowcount = len(df)
         # filtering
         filters = payload.get("filters")
         if isinstance(filters, list) and len(filters) > 0:
@@ -258,7 +263,7 @@ class _PandasDataAccessor(_DataAccessor):
                 val = fd.get("value")
                 action = fd.get("action")
                 if isinstance(val, str):
-                    if self.__is_date_column(t.cast(pd.DataFrame, value), col):
+                    if self.__is_date_column(t.cast(pd.DataFrame, df), col):
                         val = datetime.fromisoformat(val[:-1])
                     vars.append(val)
                 val = f"@vars[{len(vars) - 1}]" if isinstance(val, (str, datetime)) else val
@@ -267,10 +272,10 @@ class _PandasDataAccessor(_DataAccessor):
                     query += " and "
                 query += f"`{col}`{right}"
             try:
-                value = value.query(query)
+                df = df.query(query)
                 is_copied = True
             except Exception as e:
-                _warn(f"Dataframe filtering: invalid query '{query}' on {value.head()}", e)
+                _warn(f"Dataframe filtering: invalid query '{query}' on {df.head()}", e)
 
         dictret: t.Optional[t.Dict[str, t.Any]]
         if paged:
@@ -278,7 +283,7 @@ class _PandasDataAccessor(_DataAccessor):
             applies = payload.get("applies")
             if isinstance(aggregates, list) and len(aggregates) and isinstance(applies, dict):
                 applies_with_fn = {
-                    k: v if v in _PandasDataAccessor.__AGGREGATE_FUNCTIONS else gui._get_user_function(v)
+                    k: v if v in _PandasDataAccessor.__AGGREGATE_FUNCTIONS else self._gui._get_user_function(v)
                     for k, v in applies.items()
                 }
 
@@ -286,14 +291,14 @@ class _PandasDataAccessor(_DataAccessor):
                     if col not in applies_with_fn.keys():
                         applies_with_fn[col] = "first"
                 try:
-                    value = t.cast(pd.DataFrame, value).groupby(aggregates).agg(applies_with_fn)
+                    df = t.cast(pd.DataFrame, df).groupby(aggregates).agg(applies_with_fn)
                 except Exception:
                     _warn(f"Cannot aggregate {var_name} with groupby {aggregates} and aggregates {applies}.")
             inf = payload.get("infinite")
             if inf is not None:
                 ret_payload["infinite"] = inf
             # real number of rows is needed to calculate the number of pages
-            rowcount = len(value)
+            rowcount = len(df)
             # here we'll deal with start and end values from payload if present
             if isinstance(payload["start"], int):
                 start = int(payload["start"])
@@ -326,9 +331,9 @@ class _PandasDataAccessor(_DataAccessor):
             order_by = payload.get("orderby")
             if isinstance(order_by, str) and len(order_by):
                 try:
-                    if value.columns.dtype.name == "int64":
+                    if df.columns.dtype.name == "int64":
                         order_by = int(order_by)
-                    new_indexes = t.cast(pd.DataFrame, value)[order_by].values.argsort(axis=0)
+                    new_indexes = t.cast(pd.DataFrame, df)[order_by].values.argsort(axis=0)
                     if payload.get("sort") == "desc":
                         # reverse order
                         new_indexes = new_indexes[::-1]
@@ -338,10 +343,9 @@ class _PandasDataAccessor(_DataAccessor):
                     new_indexes = slice(start, end + 1)  # type: ignore
             else:
                 new_indexes = slice(start, end + 1)  # type: ignore
-            value = self.__build_transferred_cols(
-                gui,
+            df = self.__build_transferred_cols(
                 columns,
-                t.cast(pd.DataFrame, value),
+                t.cast(pd.DataFrame, df),
                 styles=payload.get("styles"),
                 tooltips=payload.get("tooltips"),
                 is_copied=is_copied,
@@ -349,7 +353,7 @@ class _PandasDataAccessor(_DataAccessor):
                 handle_nan=payload.get("handlenan", False),
             )
             dictret = self.__format_data(
-                value,
+                df,
                 data_format,
                 "records",
                 start,
@@ -360,7 +364,7 @@ class _PandasDataAccessor(_DataAccessor):
             compare = payload.get("compare")
             if isinstance(compare, str):
                 comp_df = _compare_function(
-                    gui, compare, var_name, t.cast(pd.DataFrame, orig_df), payload.get("compare_datas", "")
+                    self._gui, compare, var_name, t.cast(pd.DataFrame, orig_df), payload.get("compare_datas", "")
                 )
                 if isinstance(comp_df, pd.DataFrame) and not comp_df.empty:
                     try:
@@ -369,7 +373,7 @@ class _PandasDataAccessor(_DataAccessor):
                             comp_df = t.cast(pd.DataFrame, comp_df.get(cols))
                             comp_df.columns = t.cast(pd.Index, [t.cast(tuple, c)[0] for c in cols])
                         comp_df.dropna(axis=1, how="all", inplace=True)
-                        comp_df = self.__build_transferred_cols(gui, columns, comp_df, new_indexes=new_indexes)
+                        comp_df = self.__build_transferred_cols(columns, comp_df, new_indexes=new_indexes)
                         dictret["comp"] = self.__format_data(comp_df, data_format, "records").get("data")
                     except Exception as e:
                         _warn("Pandas accessor compare raised an exception", e)
@@ -382,7 +386,9 @@ class _PandasDataAccessor(_DataAccessor):
             for decimator_pl in decimators:
                 decimator = decimator_pl.get("decimator")
                 decimator_instance = (
-                    gui._get_user_instance(decimator, PropertyType.decimator.value) if decimator is not None else None
+                    self._gui._get_user_instance(decimator, PropertyType.decimator.value)
+                    if decimator is not None
+                    else None
                 )
                 if isinstance(decimator_instance, PropertyType.decimator.value):
                     x_column, y_column, z_column = (
@@ -398,14 +404,14 @@ class _PandasDataAccessor(_DataAccessor):
                         y0 = relayoutData.get("yaxis.range[0]")
                         y1 = relayoutData.get("yaxis.range[1]")
 
-                        value, is_copied = _df_relayout(
-                            t.cast(pd.DataFrame, value), x_column, y_column, chart_mode, x0, x1, y0, y1, is_copied
+                        df, is_copied = _df_relayout(
+                            t.cast(pd.DataFrame, df), x_column, y_column, chart_mode, x0, x1, y0, y1, is_copied
                         )
 
-                    if nb_rows_max and decimator_instance._is_applicable(value, nb_rows_max, chart_mode):
+                    if nb_rows_max and decimator_instance._is_applicable(df, nb_rows_max, chart_mode):
                         try:
-                            value, is_copied = _df_data_filter(
-                                t.cast(pd.DataFrame, value),
+                            df, is_copied = _df_data_filter(
+                                t.cast(pd.DataFrame, df),
                                 x_column,
                                 y_column,
                                 z_column,
@@ -413,21 +419,21 @@ class _PandasDataAccessor(_DataAccessor):
                                 payload=decimator_payload,
                                 is_copied=is_copied,
                             )
-                            gui._call_on_change(f"{var_name}.{decimator}.nb_rows", len(value))
+                            self._gui._call_on_change(f"{var_name}.{decimator}.nb_rows", len(df))
                         except Exception as e:
                             _warn(f"Limit rows error with {decimator} for Dataframe", e)
-            value = self.__build_transferred_cols(gui, columns, t.cast(pd.DataFrame, value), is_copied=is_copied)
-            if payload.get("csv") is True:
-                ret_payload["df"] = value
+            df = self.__build_transferred_cols(columns, t.cast(pd.DataFrame, df), is_copied=is_copied)
+            if data_format is _DataFormat.CSV:
+                ret_payload["df"] = df
                 dictret = None
             else:
-                dictret = self.__format_data(value, data_format, "list", data_extraction=True)
+                dictret = self.__format_data(df, data_format, "list", data_extraction=True)
 
         ret_payload["value"] = dictret
         return ret_payload
 
     def get_data(
-        self, gui: Gui, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
+        self, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
     ) -> t.Dict[str, t.Any]:
         if isinstance(value, list):
             # If is_chart data
@@ -440,7 +446,7 @@ class _PandasDataAccessor(_DataAccessor):
                 data = []
                 for i, v in enumerate(value):
                     ret = (
-                        self.__get_data(gui, var_name, v, payload, data_format, f"{i}/")
+                        self.__get_data(var_name, self.__to_dataframe(v), payload, data_format, f"{i}/")
                         if isinstance(v, _PandasDataAccessor.__types)
                         else {}
                     )
@@ -451,18 +457,25 @@ class _PandasDataAccessor(_DataAccessor):
                 return ret_payload
             else:
                 value = value[0]
-        if isinstance(value, _PandasDataAccessor.__types):  # type: ignore
-            return self.__get_data(gui, var_name, value, payload, data_format)
-        return {}
+        return self.__get_data(var_name, self.__to_dataframe(value), payload, data_format)
 
-    def _on_edit(self, df: pd.DataFrame, payload: t.Dict[str, t.Any]):
+    def on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
+        df = self.to_pandas(value)
+        if not isinstance(df, pd.DataFrame):
+            raise ValueError(f"Cannot edit {type(value)}.")
         df.at[payload["index"], payload["col"]] = payload["value"]
-        return df
+        return self._from_pandas(df, type(value))
 
-    def _on_delete(self, df: pd.DataFrame, payload: t.Dict[str, t.Any]):
-        return df.drop(payload["index"])
+    def on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
+        df = self.to_pandas(value)
+        if not isinstance(df, pd.DataFrame):
+            raise ValueError(f"Cannot delete a row from {type(value)}.")
+        return self._from_pandas(df.drop(payload["index"]), type(value))
 
-    def _on_add(self, df: pd.DataFrame, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
+    def on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
+        df = self.to_pandas(value)
+        if not isinstance(df, pd.DataFrame):
+            raise ValueError(f"Cannot add a row to {type(value)}.")
         # Save the insertion index
         index = payload["index"]
         # Create the new row (Column value types must match the original DataFrame's)
@@ -471,15 +484,30 @@ class _PandasDataAccessor(_DataAccessor):
             new_row = [0 if is_numeric_dtype(df[c]) else "" for c in list(col_types)] if new_row is None else new_row
             if index > 0:
                 # Column names and value types must match the original DataFrame
-                new_df = pd.DataFrame(new_row, columns=list(col_types))
+                new_df = pd.DataFrame([new_row], columns=list(col_types))
                 # Split the DataFrame
-                rows_before = df.loc[:index-1]
-                rows_after = df.loc[index+1:]
-                return pd.concat([rows_before, new_df, rows_after], ignore_index=True)
+                rows_before = df.iloc[:index]
+                rows_after = df.iloc[index:]
+                return self._from_pandas(pd.concat([rows_before, new_df, rows_after], ignore_index=True), type(value))
             else:
+                df = df.copy()
                 # Insert as the new first row
                 df.loc[-1] = new_row  # Insert the new row
                 df.index = df.index + 1  # Shift index
-                return df.sort_index()
-        return df
+                return self._from_pandas(df.sort_index(), type(value))
+        return value
 
+    def to_csv(self, var_name: str, value: t.Any):
+        df = self.to_pandas(value)
+        if not isinstance(df, pd.DataFrame):
+            raise ValueError(f"Cannot export {type(value)} to csv.")
+        dict_ret = self.__get_data(var_name, df, {"alldata": True}, _DataFormat.CSV)
+        if isinstance(dict_ret, dict):
+            dfr = dict_ret.get("df")
+            if isinstance(dfr, pd.DataFrame):
+                fd, temp_path = mkstemp(".csv", var_name, text=True)
+                with os.fdopen(fd, "wt", newline="") as csv_file:
+                    dfr.to_csv(csv_file, index=False)
+
+                return temp_path
+        return None

+ 27 - 34
taipy/gui/gui.py

@@ -26,7 +26,6 @@ import warnings
 from importlib import metadata, util
 from importlib.util import find_spec
 from pathlib import Path
-from tempfile import mkstemp
 from types import FrameType, FunctionType, LambdaType, ModuleType, SimpleNamespace
 from urllib.parse import unquote, urlencode, urlparse
 
@@ -63,7 +62,7 @@ from .builder import _ElementApiGenerator
 from .config import Config, ConfigParameter, _Config
 from .custom import Page as CustomPage
 from .data.content_accessor import _ContentAccessor
-from .data.data_accessor import _DataAccessor, _DataAccessors
+from .data.data_accessor import _DataAccessors
 from .data.data_format import _DataFormat
 from .data.data_scope import _DataScopes
 from .extension.library import Element, ElementLibrary
@@ -313,7 +312,7 @@ class Gui:
 
         self._config = _Config()
         self.__content_accessor = None
-        self._accessors = _DataAccessors()
+        self.__accessors: t.Optional[_DataAccessors] = None
         self.__state: t.Optional[State] = None
         self.__bindings = _Bindings(self)
         self.__locals_context = _LocalsContext()
@@ -1100,7 +1099,7 @@ class Gui:
                                 e,
                             )
             if not isinstance(ret_payload, dict):
-                ret_payload = self._accessors._get_data(self, var_name, newvalue, payload)
+                ret_payload = self._get_accessor().get_data(var_name, newvalue, payload)
             self.__send_ws_update_with_dict({var_name: ret_payload})
 
     def __request_var_update(self, payload: t.Any):
@@ -1408,22 +1407,16 @@ class Gui:
 
     def __download_csv(self, state: State, var_name: str, payload: dict):
         holder_name = t.cast(str, payload.get("var_name"))
-        ret = self._accessors._get_data(
-            self,
-            holder_name,
-            _getscopeattr(self, holder_name, None),
-            {"alldata": True, "csv": True},
-        )
-        if isinstance(ret, dict):
-            df = ret.get("df")
-            try:
-                fd, temp_path = mkstemp(".csv", var_name, text=True)
-                with os.fdopen(fd, "wt", newline="") as csv_file:
-                    df.to_csv(csv_file, index=False)  # type: ignore[union-attr]
-                self._download(temp_path, "data.csv", Gui.__DOWNLOAD_DELETE_ACTION)
-            except Exception as e:  # pragma: no cover
-                if not self._call_on_exception("download_csv", e):
-                    _warn("download_csv(): Exception raised", e)
+        try:
+            csv_path = self._get_accessor().to_csv(
+                holder_name,
+                _getscopeattr(self, holder_name, None),
+            )
+            if csv_path:
+                self._download(csv_path, "data.csv", Gui.__DOWNLOAD_DELETE_ACTION)
+        except Exception as e:  # pragma: no cover
+            if not self._call_on_exception("download_csv", e):
+                _warn("download_csv(): Exception raised", e)
 
     def __delete_csv(self, state: State, var_name: str, payload: dict):
         try:
@@ -1555,7 +1548,6 @@ class Gui:
         variable reflected in the user interface.
 
         Arguments:
-            gui (Gui^): The current Gui instance.
             callback: The user-defined function to be invoked.<br/>
                 The first parameter of this function must be a `State^` object representing the
                 client for which it is invoked.<br/>
@@ -1577,7 +1569,6 @@ class Gui:
         instance. All user interfaces reflect the change.
 
         Arguments:
-            gui (Gui^): The current Gui instance.
             var_name: The name of the variable to change.
             value: The new value for the variable.
         """
@@ -1600,9 +1591,9 @@ class Gui:
             values: An optional dictionary where each key is the name of a variable to change, and
                 where the associated value is the new value to set for that variable, in each state
                 for the application.
-            **kwargs: A collection of variable name-value pairs that are updated for each state of
-                the application. Name-value pairs overload the ones in *values* if the name exists
-                as a key in the dictionary.
+            **kwargs (dict[str, any]): A collection of variable name-value pairs that are updated
+                for each state of the application. Name-value pairs overload the ones in *values*
+                if the name exists as a key in the dictionary.
         """
         if kwargs:
             values = values.copy() if values else {}
@@ -1669,7 +1660,7 @@ class Gui:
         TODO: Default implementation of on_edit for tables
         """
         try:
-            setattr(state, var_name, self._accessors._on_edit(getattr(state, var_name), payload))
+            setattr(state, var_name, self._get_accessor().on_edit(getattr(state, var_name), payload))
         except Exception as e:
             _warn("TODO: Table.on_edit", e)
 
@@ -1678,7 +1669,7 @@ class Gui:
         TODO: Default implementation of on_delete for tables
         """
         try:
-            setattr(state, var_name, self._accessors._on_delete(getattr(state, var_name), payload))
+            setattr(state, var_name, self._get_accessor().on_delete(getattr(state, var_name), payload))
         except Exception as e:
             _warn("TODO: Table.on_delete", e)
 
@@ -1689,7 +1680,7 @@ class Gui:
         TODO: Default implementation of on_add for tables
         """
         try:
-            setattr(state, var_name, self._accessors._on_add(getattr(state, var_name), payload, new_row))
+            setattr(state, var_name, self._get_accessor().on_add(getattr(state, var_name), payload, new_row))
         except Exception as e:
             _warn("TODO: Table.on_add", e)
 
@@ -1706,7 +1697,7 @@ class Gui:
                     col_dict = _get_columns_dict(
                         data,
                         attributes.get("columns", {}),
-                        self._accessors._get_col_types(data_hash, _TaipyData(data, data_hash)),
+                        self._get_accessor().get_col_types(data_hash, _TaipyData(data, data_hash)),
                         attributes.get("date_format"),
                         attributes.get("number_format"),
                     )
@@ -1729,7 +1720,7 @@ class Gui:
                     config = _build_chart_config(
                         self,
                         attributes,
-                        self._accessors._get_col_types(data_hash, _TaipyData(kwargs.get(data_hash), data_hash)),
+                        self._get_accessor().get_col_types(data_hash, _TaipyData(kwargs.get(data_hash), data_hash)),
                     )
 
                     return json.dumps(config, cls=_TaipyJsonEncoder)
@@ -2356,9 +2347,6 @@ class Gui:
             }
         )
 
-    def _register_data_accessor(self, data_accessor_class: t.Type[_DataAccessor]) -> None:
-        self._accessors._register(data_accessor_class)
-
     def get_flask_app(self) -> Flask:
         """Get the internal Flask application.
 
@@ -2568,6 +2556,11 @@ class Gui:
         for bp in self._flask_blueprint:
             self._server.get_flask().register_blueprint(bp)
 
+    def _get_accessor(self):
+        if self.__accessors is None:
+            self.__accessors = _DataAccessors(self)
+        return self.__accessors
+
     def run(
         self,
         run_server: bool = True,
@@ -2707,7 +2700,7 @@ class Gui:
         self.__register_blueprint()
 
         # Register data accessor communication data format (JSON, Apache Arrow)
-        self._accessors._set_data_format(_DataFormat.APACHE_ARROW if app_config["use_arrow"] else _DataFormat.JSON)
+        self._get_accessor().set_data_format(_DataFormat.APACHE_ARROW if app_config["use_arrow"] else _DataFormat.JSON)
 
         # Use multi user or not
         self._bindings()._set_single_client(bool(app_config["single_client"]))

+ 2 - 2
taipy/gui/icon.py

@@ -15,8 +15,8 @@ import typing as t
 class Icon:
     """Small image in the User Interface.
 
-    Icons are typically used in controls like [button](../../userman/gui/viselements/standard-and-blocks/button.md)
-    or items in a [menu](../../userman/gui/viselements/standard-and-blocks/menu.md).
+    Icons are typically used in controls like [button](../../userman/gui/viselements/generic/button.md)
+    or items in a [menu](../../userman/gui/viselements/generic/menu.md).
 
     Attributes:
         path (str): The path to the image file.

+ 3 - 3
taipy/gui/partial.py

@@ -28,9 +28,9 @@ class Partial(_Page):
     and not related pages. This allows not to have to repeat yourself when
     creating your page templates.
 
-    Visual elements such as [`part`](../../userman/gui/viselements/standard-and-blocks/part.md),
-    [`dialog`](../..//userman/gui/viselements/standard-and-blocks/dialog.md) or
-    [`pane`](../../userman/gui/viselements/standard-and-blocks/pane.md) can use Partials.
+    Visual elements such as [`part`](../../userman/gui/viselements/generic/part.md),
+    [`dialog`](../../userman/gui/viselements/generic/dialog.md) or
+    [`pane`](../../userman/gui/viselements/generic/pane.md) can use Partials.
 
     Note that `Partial` has no constructor (no `__init__()` method): to create a
     `Partial`, you must call the `Gui.add_partial()^` function.

+ 1 - 0
taipy/gui/types.py

@@ -118,6 +118,7 @@ class PropertyType(Enum):
     json = "json"
     single_lov = "singlelov"
     lov = _TaipyLov
+    lov_no_default = "lovnodefault"
     """
     The property holds a LoV (list of values).
     """

+ 6 - 6
taipy/gui/viselements.json

@@ -627,7 +627,7 @@
                     {
                         "name": "decimator",
                         "type": "indexed(taipy.gui.data.Decimator)",
-                        "doc": "A decimator instance for the indicated trace that will reduce the size of the data being sent back and forth.<br>If defined as indexed, it will impact only the indicated trace; if not, it will apply only the first trace."
+                        "doc": "A decimator instance for the indicated trace that reduces the volume of the data being sent back and forth.<br>If defined as <i>indexed</i>, it impacts only the indicated trace; if not, it applies to the first trace only."
                     },
                     {
                         "name": "rebuild",
@@ -1224,7 +1224,7 @@
                     {
                         "name": "color_map",
                         "type": "dict",
-                        "doc": "TODO The color_map is used to display different colors for different ranges of the metric. The color_map's keys represent the starting point of each range, which is a number, while the values represent the corresponding color for that range. If the value associated with a key is set to None, it implies that the corresponding range will not be assigned any color."
+                        "doc": "TODO The color_map is used to display different colors for different ranges of the metric. The color_map's keys represent the starting point of each range, which is a number, while the values represent the corresponding color for that range. If the value associated with a key is set to None, it implies that the corresponding range is not assigned any color."
                     },
                     {
                         "name": "width",
@@ -1615,7 +1615,7 @@
                         "name": "class_name",
                         "default_property": true,
                         "type": "dynamic(str)",
-                        "doc": "A list of CSS class names, separated by white spaces, that will be associated with the generated HTML Element.<br/>These class names are added to the default <code>taipy-part</code>."
+                        "doc": "A list of CSS class names, separated by white spaces, that are associated with the generated HTML Element.<br/>These class names are added to the default <code>taipy-part</code> class name."
                     },
                     {
                         "name": "page",
@@ -1698,7 +1698,7 @@
                         "name": "close_label",
                         "type": "str",
                         "default_value": "\"Close\"",
-                        "doc": "The tooltip of the top-right close icon button. In the <tt>on_action</tt> callback, args will hold -1."
+                        "doc": "The tooltip of the top-right close icon button. In the <tt>on_action</tt> callback, <i>args</i> will be set to -1."
                     },
                     {
                         "name": "labels",
@@ -1965,7 +1965,7 @@
                     {
                         "name": "id",
                         "type": "str",
-                        "doc": "The identifier that will be assigned to the rendered HTML component."
+                        "doc": "The identifier that is assigned to the rendered HTML component."
                     },
                     {
                         "name": "properties",
@@ -1975,7 +1975,7 @@
                     {
                         "name": "class_name",
                         "type": "dynamic(str)",
-                        "doc": "The list of CSS class names that will be associated with the generated HTML Element.<br/>These class names will be added to the default <code>taipy-&lt;element_type&gt;</code>."
+                        "doc": "The list of CSS class names that are associated with the generated HTML Element.<br/>These class names are added to the default <code>taipy-&lt;element_type&gt;</code> class name."
                     },
                     {
                         "name": "hover_text",

+ 2 - 2
taipy/gui_core/_GuiCoreLib.py

@@ -87,7 +87,7 @@ class _GuiCore(ElementLibrary):
             },
             inner_properties={
                 "inner_scenarios": ElementProperty(
-                    PropertyType.lov,
+                    PropertyType.lov_no_default,
                     f"{{{__CTX_VAR_NAME}.get_scenarios(<tp:prop:{__SEL_SCENARIOS_PROP}>, "
                     + f"{__SCENARIO_SELECTOR_FILTER_VAR}<tp:uniq:sc>, "
                     + f"{__SCENARIO_SELECTOR_SORT_VAR}<tp:uniq:sc>)}}",
@@ -182,7 +182,7 @@ class _GuiCore(ElementLibrary):
             },
             inner_properties={
                 "inner_datanodes": ElementProperty(
-                    PropertyType.lov,
+                    PropertyType.lov_no_default,
                     f"{{{__CTX_VAR_NAME}.get_datanodes_tree(<tp:prop:{__DATANODE_SEL_SCENARIO_PROP}>, "
                     + f"<tp:prop:{__SEL_DATANODES_PROP}>, "
                     + f"{__DATANODE_SELECTOR_FILTER_VAR}<tp:uniq:dns>, "

+ 1 - 0
taipy/gui_core/__init__.py

@@ -9,4 +9,5 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
+from ._adapters import CustomScenarioFilter, DataNodeFilter, DataNodeScenarioFilter, ScenarioFilter
 from ._init import *

+ 153 - 106
taipy/gui_core/_adapters.py

@@ -246,14 +246,18 @@ _operators: t.Dict[str, t.Callable] = {
 
 
 def _filter_value(base_val: t.Any, operator: t.Callable, val: t.Any, adapt: t.Optional[t.Callable] = None):
-    if isinstance(base_val, (datetime, date)):
-        base_val = base_val.isoformat()
-    val = adapt(base_val, val) if adapt else val
-    if isinstance(base_val, str) and isinstance(val, str):
-        base_val = base_val.lower()
-        val = val.lower()
+    if base_val is None:
+        base_val = "" if isinstance(val, str) else 0
+    else:
+        if isinstance(base_val, (datetime, date)):
+            base_val = base_val.isoformat()
+        val = adapt(base_val, val) if adapt else val
+        if isinstance(base_val, str) and isinstance(val, str):
+            base_val = base_val.lower()
+            val = val.lower()
     return operator(base_val, val)
 
+
 def _adapt_type(base_val, val):
     # try casting the filter to the value
     if isinstance(val, str) and not isinstance(base_val, str):
@@ -267,6 +271,7 @@ def _adapt_type(base_val, val):
                 pass
     return val
 
+
 def _filter_iterable(list_val: Iterable, operator: t.Callable, val: t.Any):
     if operator is contains:
         types = {type(v) for v in list_val}
@@ -285,17 +290,19 @@ def _invoke_action(
 ) -> bool:
     if ent is None:
         return False
-    if not (col_fn or col).isidentifier():
-        _warn(f'Error filtering with "{col_fn or col}": not a valid Python identifier.')
-        return True
     try:
         if col_type == "any":
             # when a property is not found, return True only if action is not equals
             if not is_dn and not hasattr(ent, "properties") or not ent.properties.get(col_fn or col):
                 return action == "!="
         if op := _operators.get(action):
-            cur_val = attrgetter(col_fn or col)(ent)
-            cur_val = cur_val() if col_fn else cur_val
+            if callable(col):
+                cur_val = col(ent)
+            else:
+                cur_val = attrgetter(col_fn or col)(ent)
+                cur_val = cur_val() if col_fn else cur_val
+            if isinstance(cur_val, DataNode):
+                cur_val = cur_val.read()
             if not isinstance(cur_val, str) and isinstance(cur_val, Iterable):
                 return _filter_iterable(cur_val, op, val)
             return _filter_value(cur_val, op, val, _adapt_type)
@@ -341,26 +348,76 @@ def _get_entity_property(col: str, *types: t.Type):
     return sort_key
 
 
-def _get_datanode_property(attr: str):
-    if (parts := attr.split(".")) and len(parts) > 1:
-        return parts[1]
-    return None
+@dataclass
+class _Filter(_DoNotUpdate):
+    label: str
+    property_type: t.Optional[t.Type]
 
+    def get_property(self):
+        return self.label
+
+    def get_type(self):
+        if self.property_type is bool:
+            return "boolean"
+        elif self.property_type is int or self.property_type is float:
+            return "number"
+        elif self.property_type is datetime or self.property_type is date:
+            return "date"
+        elif self.property_type is str:
+            return "str"
+        return "any"
+
+
+@dataclass
+class ScenarioFilter(_Filter):
+    property_id: str
+
+    def get_property(self):
+        return self.property_id
 
-class _GuiCoreProperties(ABC):
-    @staticmethod
-    @abstractmethod
-    def get_type(attr: str):
-        raise NotImplementedError
+
+@dataclass
+class DataNodeScenarioFilter(_Filter):
+    datanode_config_id: str
+    property_id: str
+
+    def get_property(self):
+        return f"{self.datanode_config_id}.{self.property_id}"
+
+
+_CUSTOM_PREFIX = "fn:"
+
+
+@dataclass
+class CustomScenarioFilter(_Filter):
+    filter_function: t.Callable[[Scenario], t.Any]
+
+    def __post_init__(self):
+        if self.filter_function.__name__ == "<lambda>":
+            raise TypeError("CustomScenarioFilter does not support lambda functions.")
+        mod = self.filter_function.__module__
+        self.module = mod if isinstance(mod, str) else mod.__name__
+
+    def get_property(self):
+        return f"{_CUSTOM_PREFIX}{self.module}:{self.filter_function.__name__}"
 
     @staticmethod
-    @abstractmethod
-    def get_col_name(attr: str):
-        raise NotImplementedError
+    def _get_custom(col: str) -> t.Optional[t.List[str]]:
+        return col[len(_CUSTOM_PREFIX) :].split(":") if col.startswith(_CUSTOM_PREFIX) else None
+
+
+@dataclass
+class DataNodeFilter(_Filter):
+    property_id: str
 
+    def get_property(self):
+        return self.property_id
+
+
+class _GuiCoreProperties(ABC):
     @staticmethod
     @abstractmethod
-    def get_default_list():
+    def get_default_list() -> t.List[_Filter]:
         raise NotImplementedError
 
     @staticmethod
@@ -380,18 +437,32 @@ class _GuiCoreProperties(ABC):
                 return None
         if isinstance(data, str):
             data = data.split(";")
+        if isinstance(data, _Filter):
+            data = (data,)
         if isinstance(data, (list, tuple)):
-            flist = []
+            flist: t.List[_Filter] = []  # type: ignore[annotation-unchecked]
             for f in data:
-                if f == "*":
-                    flist.extend(self.get_default_list())
-                else:
+                if isinstance(f, str):
+                    f = f.strip()
+                    if f == "*":
+                        flist.extend(p.filter for p in self.get_default_list())
+                    elif f:
+                        flist.append(
+                            next((p.filter for p in self.get_default_list() if p.get_property() == f), _Filter(f, None))
+                        )
+                elif isinstance(f, _Filter):
                     flist.append(f)
             return json.dumps(
                 [
-                    (attr, self.get_type(attr), self.get_enums().get(attr)) if self.full_desc() else (attr,)
+                    (
+                        attr.label,
+                        attr.get_property(),
+                        attr.get_type(),
+                        self.get_enums().get(attr.get_property()),
+                    )
+                    if self.full_desc()
+                    else (attr.label, attr.get_property())
                     for attr in flist
-                    if attr
                 ]
             )
         return None
@@ -399,63 +470,57 @@ class _GuiCoreProperties(ABC):
 
 @dataclass(frozen=True)
 class _GuiCorePropDesc:
-    attr: str
-    type: str
+    filter: _Filter
     extended: bool = False
     for_sort: bool = False
 
 
-_EMPTY_PROP_DESC = _GuiCorePropDesc("", "any")
-
-
 class _GuiCoreScenarioProperties(_GuiCoreProperties):
-    _SC_PROPS: t.Dict[str, _GuiCorePropDesc] = {
-        "Config id": _GuiCorePropDesc("config_id", "string", for_sort=True),
-        "Label": _GuiCorePropDesc("get_simple_label()", "string", for_sort=True),
-        "Creation date": _GuiCorePropDesc("creation_date", "date", for_sort=True),
-        "Cycle label": _GuiCorePropDesc("cycle.name", "string", extended=True),
-        "Cycle start": _GuiCorePropDesc("cycle.start_date", "date", extended=True),
-        "Cycle end": _GuiCorePropDesc("cycle.end_date", "date", extended=True),
-        "Primary": _GuiCorePropDesc("is_primary", "boolean", extended=True),
-        "Tags": _GuiCorePropDesc("tags", "string"),
-    }
-    __DN_PROPS = {
-        "Up to date": _GuiCorePropDesc("is_up_to_date", "boolean"),
-        "Valid": _GuiCorePropDesc("is_valid", "boolean"),
-        "Last edit date": _GuiCorePropDesc("last_edit_date", "date"),
-    }
+    _SC_PROPS: t.List[_GuiCorePropDesc] = [
+        _GuiCorePropDesc(ScenarioFilter("Config id", str, "config_id"), for_sort=True),
+        _GuiCorePropDesc(ScenarioFilter("Label", str, "get_simple_label()"), for_sort=True),
+        _GuiCorePropDesc(ScenarioFilter("Creation date", datetime, "creation_date"), for_sort=True),
+        _GuiCorePropDesc(ScenarioFilter("Cycle label", str, "cycle.name"), extended=True),
+        _GuiCorePropDesc(ScenarioFilter("Cycle start", datetime, "cycle.start_date"), extended=True),
+        _GuiCorePropDesc(ScenarioFilter("Cycle end", datetime, "cycle.end_date"), extended=True),
+        _GuiCorePropDesc(ScenarioFilter("Primary", bool, "is_primary"), extended=True),
+        _GuiCorePropDesc(ScenarioFilter("Tags", str, "tags")),
+    ]
     __ENUMS = None
     __SC_CYCLE = None
 
     @staticmethod
-    def get_type(attr: str):
-        if prop := _get_datanode_property(attr):
-            return _GuiCoreScenarioProperties.__DN_PROPS.get(prop, _EMPTY_PROP_DESC).type
-        return _GuiCoreScenarioProperties._SC_PROPS.get(attr, _EMPTY_PROP_DESC).type
-
-    @staticmethod
-    def get_col_name(attr: str):
-        if prop := _get_datanode_property(attr):
-            return (
-                attr.split(".")[0]
-                + f".{_GuiCoreScenarioProperties.__DN_PROPS.get(prop, _EMPTY_PROP_DESC).attr or prop}"
+    def is_datanode_property(attr: str):
+        if "." not in attr:
+            return False
+        return (
+            next(
+                (
+                    p
+                    for p in _GuiCoreScenarioProperties._SC_PROPS
+                    if t.cast(ScenarioFilter, p.filter).property_id == attr
+                ),
+                None,
             )
-        return _GuiCoreScenarioProperties._SC_PROPS.get(attr, _EMPTY_PROP_DESC).attr or attr
+            is None
+        )
 
     def get_enums(self):
+        if not self.full_desc():
+            return {}
         if _GuiCoreScenarioProperties.__ENUMS is None:
             _GuiCoreScenarioProperties.__ENUMS = {
                 k: v
                 for k, v in {
-                    "Config id": [c for c in Config.scenarios.keys() if c != "default"],
-                    "Tags": list(
+                    "config_id": [c for c in Config.scenarios.keys() if c != "default"],
+                    "tags": list(
                         {t for s in Config.scenarios.values() for t in s.properties.get("authorized_tags", [])}
                     ),
                 }.items()
                 if len(v)
             }
 
-        return _GuiCoreScenarioProperties.__ENUMS if self.full_desc() else {}
+        return _GuiCoreScenarioProperties.__ENUMS
 
     @staticmethod
     def has_cycle():
@@ -467,10 +532,8 @@ class _GuiCoreScenarioProperties(_GuiCoreProperties):
 
 
 class _GuiCoreScenarioFilter(_GuiCoreScenarioProperties, _TaipyBase):
-    DEFAULT = list(_GuiCoreScenarioProperties._SC_PROPS.keys())
-    DEFAULT_NO_CYCLE = [
-        p[0] for p in filter(lambda prop: not prop[1].extended, _GuiCoreScenarioProperties._SC_PROPS.items())
-    ]
+    DEFAULT = _GuiCoreScenarioProperties._SC_PROPS
+    DEFAULT_NO_CYCLE = list(filter(lambda prop: not prop.extended, _GuiCoreScenarioProperties._SC_PROPS))
 
     @staticmethod
     def full_desc():
@@ -490,13 +553,10 @@ class _GuiCoreScenarioFilter(_GuiCoreScenarioProperties, _TaipyBase):
 
 
 class _GuiCoreScenarioSort(_GuiCoreScenarioProperties, _TaipyBase):
-    DEFAULT = [p[0] for p in filter(lambda prop: prop[1].for_sort, _GuiCoreScenarioProperties._SC_PROPS.items())]
-    DEFAULT_NO_CYCLE = [
-        p[0]
-        for p in filter(
-            lambda prop: prop[1].for_sort and not prop[1].extended, _GuiCoreScenarioProperties._SC_PROPS.items()
-        )
-    ]
+    DEFAULT = list(filter(lambda prop: prop.for_sort, _GuiCoreScenarioProperties._SC_PROPS))
+    DEFAULT_NO_CYCLE = list(
+        filter(lambda prop: prop.for_sort and not prop.extended, _GuiCoreScenarioProperties._SC_PROPS)
+    )
 
     @staticmethod
     def full_desc():
@@ -516,27 +576,19 @@ class _GuiCoreScenarioSort(_GuiCoreScenarioProperties, _TaipyBase):
 
 
 class _GuiCoreDatanodeProperties(_GuiCoreProperties):
-    _DN_PROPS: t.Dict[str, _GuiCorePropDesc] = {
-        "Config id": _GuiCorePropDesc("config_id", "string", for_sort=True),
-        "Label": _GuiCorePropDesc("get_simple_label()", "string", for_sort=True),
-        "Up to date": _GuiCorePropDesc("is_up_to_date", "boolean"),
-        "Last edit date": _GuiCorePropDesc("last_edit_date", "date", for_sort=True),
-        "Input": _GuiCorePropDesc("is_input", "boolean"),
-        "Output": _GuiCorePropDesc("is_output", "boolean"),
-        "Intermediate": _GuiCorePropDesc("is_intermediate", "boolean"),
-        "Expiration date": _GuiCorePropDesc("expiration_date", "date", extended=True, for_sort=True),
-        "Expired": _GuiCorePropDesc("is_expired", "boolean", extended=True),
-    }
+    _DN_PROPS: t.List[_GuiCorePropDesc] = [
+        _GuiCorePropDesc(DataNodeFilter("Config id", str, "config_id"), for_sort=True),
+        _GuiCorePropDesc(DataNodeFilter("Label", str, "get_simple_label()"), for_sort=True),
+        _GuiCorePropDesc(DataNodeFilter("Up to date", bool, "is_up_to_date")),
+        _GuiCorePropDesc(DataNodeFilter("Last edit date", datetime, "last_edit_date"), for_sort=True),
+        _GuiCorePropDesc(DataNodeFilter("Input", bool, "is_input")),
+        _GuiCorePropDesc(DataNodeFilter("Output", bool, "is_output")),
+        _GuiCorePropDesc(DataNodeFilter("Intermediate", bool, "is_intermediate")),
+        _GuiCorePropDesc(DataNodeFilter("Expiration date", datetime, "expiration_date"), extended=True, for_sort=True),
+        _GuiCorePropDesc(DataNodeFilter("Expired", bool, "is_expired"), extended=True),
+    ]
     __DN_VALIDITY = None
 
-    @staticmethod
-    def get_type(attr: str):
-        return _GuiCoreDatanodeProperties._DN_PROPS.get(attr, _EMPTY_PROP_DESC).type
-
-    @staticmethod
-    def get_col_name(attr: str):
-        return _GuiCoreDatanodeProperties._DN_PROPS.get(attr, _EMPTY_PROP_DESC).attr or attr
-
     @staticmethod
     def has_validity():
         if _GuiCoreDatanodeProperties.__DN_VALIDITY is None:
@@ -547,10 +599,8 @@ class _GuiCoreDatanodeProperties(_GuiCoreProperties):
 
 
 class _GuiCoreDatanodeFilter(_GuiCoreDatanodeProperties, _TaipyBase):
-    DEFAULT = list(_GuiCoreDatanodeProperties._DN_PROPS.keys())
-    DEFAULT_NO_VALIDITY = [
-        p[0] for p in filter(lambda prop: not prop[1].extended, _GuiCoreDatanodeProperties._DN_PROPS.items())
-    ]
+    DEFAULT = _GuiCoreDatanodeProperties._DN_PROPS
+    DEFAULT_NO_VALIDITY = list(filter(lambda prop: not prop.extended, _GuiCoreDatanodeProperties._DN_PROPS))
 
     @staticmethod
     def full_desc():
@@ -570,13 +620,10 @@ class _GuiCoreDatanodeFilter(_GuiCoreDatanodeProperties, _TaipyBase):
 
 
 class _GuiCoreDatanodeSort(_GuiCoreDatanodeProperties, _TaipyBase):
-    DEFAULT = [p[0] for p in filter(lambda prop: prop[1].for_sort, _GuiCoreDatanodeProperties._DN_PROPS.items())]
-    DEFAULT_NO_VALIDITY = [
-        p[0]
-        for p in filter(
-            lambda prop: prop[1].for_sort and not prop[1].extended, _GuiCoreDatanodeProperties._DN_PROPS.items()
-        )
-    ]
+    DEFAULT = list(filter(lambda prop: prop.for_sort, _GuiCoreDatanodeProperties._DN_PROPS))
+    DEFAULT_NO_VALIDITY = list(
+        filter(lambda prop: prop.for_sort and not prop.extended, _GuiCoreDatanodeProperties._DN_PROPS)
+    )
 
     @staticmethod
     def full_desc():

+ 29 - 12
taipy/gui_core/_context.py

@@ -12,7 +12,6 @@
 import json
 import typing as t
 from collections import defaultdict
-from datetime import datetime
 from numbers import Number
 from threading import Lock
 
@@ -62,11 +61,10 @@ from taipy.gui._warnings import _warn
 from taipy.gui.gui import _DoNotUpdate
 
 from ._adapters import (
+    CustomScenarioFilter,
     _EntityType,
-    _get_datanode_property,
     _get_entity_property,
     _GuiCoreDatanodeAdapter,
-    _GuiCoreDatanodeProperties,
     _GuiCoreScenarioProperties,
     _invoke_action,
 )
@@ -286,7 +284,6 @@ class _GuiCoreContext(CoreEventConsumerBase):
             sorted_list = entities
             for sd in reversed(sorts):
                 col = sd.get("col", "")
-                col = _GuiCoreScenarioProperties.get_col_name(col)
                 order = sd.get("order", True)
                 sorted_list = sorted(sorted_list, key=_get_entity_property(col, Scenario, Cycle), reverse=not order)
         else:
@@ -304,12 +301,24 @@ class _GuiCoreContext(CoreEventConsumerBase):
         filtered_list = list(entities)
         for fd in filters:
             col = fd.get("col", "")
-            is_datanode_prop = _get_datanode_property(col) is not None
-            col_type = _GuiCoreScenarioProperties.get_type(col)
-            col = _GuiCoreScenarioProperties.get_col_name(col)
+            is_datanode_prop = _GuiCoreScenarioProperties.is_datanode_property(col)
+            col_type = fd.get("type", "no type")
             col_fn = cp[0] if (cp := col.split("(")) and len(cp) > 1 else None
             val = fd.get("value")
             action = fd.get("action", "")
+            customs = CustomScenarioFilter._get_custom(col)
+            if customs:
+                with self.gui._set_locals_context(customs[0] or None):
+                    fn = self.gui._get_user_function(customs[1])
+                    if callable(fn):
+                        col = fn
+            if (
+                isinstance(col, str)
+                and next(filter(lambda s: not s.isidentifier(), (col_fn or col).split(".")), False) is True
+            ):
+                _warn(f'Error filtering with "{col_fn or col}": not a valid Python identifier.')
+                continue
+
             # level 1 filtering
             filtered_list = [
                 e
@@ -606,13 +615,22 @@ class _GuiCoreContext(CoreEventConsumerBase):
         filtered_list = list(entities)
         for fd in filters:
             col = fd.get("col", "")
-            col_type = _GuiCoreDatanodeProperties.get_type(col)
-            col = _GuiCoreDatanodeProperties.get_col_name(col)
+            col_type = fd.get("type", "no type")
             col_fn = cp[0] if (cp := col.split("(")) and len(cp) > 1 else None
             val = fd.get("value")
             action = fd.get("action", "")
-            if isinstance(val, str) and col_type == "date":
-                val = datetime.fromisoformat(val[:-1])
+            customs = CustomScenarioFilter._get_custom(col)
+            if customs:
+                with self.gui._set_locals_context(customs[0] or None):
+                    fn = self.gui._get_user_function(customs[1])
+                    if callable(fn):
+                        col = fn
+            if (
+                isinstance(col, str)
+                and next(filter(lambda s: not s.isidentifier(), (col_fn or col).split(".")), False) is True
+            ):
+                _warn(f'Error filtering with "{col_fn or col}": not a valid Python identifier.')
+                continue
             # level 1 filtering
             filtered_list = [
                 e
@@ -642,7 +660,6 @@ class _GuiCoreContext(CoreEventConsumerBase):
             sorted_list = entities
             for sd in reversed(sorts):
                 col = sd.get("col", "")
-                col = _GuiCoreDatanodeProperties.get_col_name(col)
                 order = sd.get("order", True)
                 sorted_list = sorted(sorted_list, key=_get_entity_property(col, DataNode), reverse=not order)
         else:

+ 8 - 8
taipy/gui_core/viselements.json

@@ -100,8 +100,8 @@
                     },
                     {
                         "name": "filter",
-                        "type": "bool|str|list[str]",
-                        "default_value": "\"Config id;Label;Creation date;Cycle label;Cycle start;Cycle end;Primary;Tags\"",
+                        "type": "bool|str|ScenarioFilter|list[str|ScenarioFilter]",
+                        "default_value": "\"*\"",
                         "doc": "TODO: a list of <code>Scenario^</code> attributes to filter on. If False, do not allow filter."
                     },
                     {
@@ -112,8 +112,8 @@
                     },
                     {
                         "name": "sort",
-                        "type": "bool|str|list[str]",
-                        "default_value": "\"Config id;Label;Creation date\"",
+                        "type": "bool|str|ScenarioFilter|list[str|ScenarioFilter]",
+                        "default_value": "\"*\"",
                         "doc": "TODO: a list of <code>Scenario^</code> attributes to sort on. If False, do not allow sort."
                     }
                 ]
@@ -355,8 +355,8 @@
                     },
                     {
                         "name": "filter",
-                        "type": "bool|str|list[str]",
-                        "default_value": "\"Config id;Label;Up to date;Last edit date;Input;Output;Intermediate;Expiration date;Expired\"",
+                        "type": "bool|str|DataNodeFilter|list[str|DataNodeFilter]",
+                        "default_value": "\"*\"",
                         "doc": "TODO: a list of <code>DataNode^</code> attributes to filter on. If False, do not allow filter."
                     },
                     {
@@ -367,8 +367,8 @@
                     },
                     {
                         "name": "sort",
-                        "type": "bool|str|list[str]",
-                        "default_value": "\"Config id;Label;Last edit date;Expiration date\"",
+                        "type": "bool|str|DataNodeFilter|list[str|DataNodeFilter]",
+                        "default_value": "\"*\"",
                         "doc": "TODO: a list of <code>DataNode^</code> attributes to sort on. If False, do not allow sort."
                     }
                 ]

+ 2 - 2
taipy/templates/README.md

@@ -26,7 +26,7 @@ and limitations under the License.
 Taipy is a Python library for creating Business Applications. More information on our
 [website](https://www.taipy.io).
 
-Taipy templates is a repository that contains templates and scaffoldings created and
+Taipy templates is a repository that contains application templates created and
 maintained by Taipy. It helps users getting started with a simple and ready-to-go application.
 
 A more in depth documentation of taipy can be found [here](https://docs.taipy.io).
@@ -38,7 +38,7 @@ taipy create
 ```
 or
 ```bash
-taipy create --template "default"
+taipy create --application "default"
 ```
 
 After providing necessary information, your new application is created in the current

+ 2 - 2
taipy/templates/package_desc.md

@@ -19,7 +19,7 @@ Taipy is a Python library for creating Business Applications. More information o
 [website](https://www.taipy.io). Taipy is split into multiple packages including *taipy-templates*
 to let users install the minimum they need.
 
-Taipy templates is a repository that contains templates created and
+Taipy templates is a repository that contains application templates created and
 maintained by Taipy. It helps users getting started with a simple and ready-to-go application.
 
 To create a Taipy application using this template, first you need to install Taipy (> 2.2).
@@ -29,7 +29,7 @@ taipy create
 ```
 or
 ```bash
-taipy create --template "default"
+taipy create --application "default"
 ```
 
 After providing necessary information, your new application is created in the current

+ 1 - 1
tests/cli/test_help_cli.py

@@ -58,7 +58,7 @@ def test_help_non_existed_command(caplog):
 
 
 def test_taipy_create_help(capsys):
-    expected_help = "create [-h] [--template"
+    expected_help = "create [-h] [--application"
 
     with patch("sys.argv", ["prog", "help", "create"]):
         with pytest.raises(SystemExit):

+ 0 - 99
tests/core/_entity/test_migrate_cli.py

@@ -12,7 +12,6 @@
 import filecmp
 import os
 import shutil
-from sqlite3 import OperationalError
 from unittest.mock import patch
 
 import mongomock
@@ -162,104 +161,6 @@ def test_migrate_fs_non_existing_folder(caplog):
     assert "Folder 'non-existing-folder' does not exist." in caplog.text
 
 
-@patch("taipy.core._entity._migrate_cli._migrate_sql_entities")
-def test_migrate_sql_specified_path(_migrate_sql_entities_mock, tmp_sqlite):
-    _MigrateCLI.create_parser()
-
-    # Test the _migrate_sql_entities is called once with the correct path
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "migrate", "--repository-type", "sql", tmp_sqlite, "--skip-backup"]):
-            _MigrateCLI.handle_command()
-            assert _migrate_sql_entities_mock.assert_called_once_with(path=tmp_sqlite)
-
-
-def test_migrate_sql_backup_and_remove(caplog, tmp_sqlite):
-    _MigrateCLI.create_parser()
-
-    # Create the .sqlite file to test
-    with open(tmp_sqlite, "w") as f:
-        f.write("")
-
-    file_name, file_extension = tmp_sqlite.rsplit(".", 1)
-    backup_sqlite = f"{file_name}_backup.{file_extension}"
-
-    # Remove backup when it does not exist should raise an error
-    with pytest.raises(SystemExit) as err:
-        with patch("sys.argv", ["prog", "migrate", "--repository-type", "sql", tmp_sqlite, "--remove-backup"]):
-            _MigrateCLI.handle_command()
-    assert err.value.code == 1
-    assert f"The backup database '{backup_sqlite}' does not exist." in caplog.text
-    assert not os.path.exists(backup_sqlite)
-
-    # Run without --skip-backup to create the backup database
-    with pytest.raises((SystemExit, OperationalError)):
-        with patch("sys.argv", ["prog", "migrate", "--repository-type", "sql", tmp_sqlite]):
-            _MigrateCLI.handle_command()
-
-    assert os.path.exists(backup_sqlite)
-
-    # Remove backup
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "migrate", "--repository-type", "sql", tmp_sqlite, "--remove-backup"]):
-            _MigrateCLI.handle_command()
-    assert f"Removed backup entities from the backup database '{backup_sqlite}'." in caplog.text
-    assert not os.path.exists(backup_sqlite)
-
-
-def test_migrate_sql_backup_and_restore(caplog, tmp_sqlite):
-    _MigrateCLI.create_parser()
-
-    # Create the .sqlite file to test
-    with open(tmp_sqlite, "w") as f:
-        f.write("")
-
-    file_name, file_extension = tmp_sqlite.rsplit(".", 1)
-    backup_sqlite = f"{file_name}_backup.{file_extension}"
-
-    # Restore backup when it does not exist should raise an error
-    with pytest.raises(SystemExit) as err:
-        with patch("sys.argv", ["prog", "migrate", "--repository-type", "sql", tmp_sqlite, "--restore"]):
-            _MigrateCLI.handle_command()
-    assert err.value.code == 1
-    assert f"The backup database '{backup_sqlite}' does not exist." in caplog.text
-    assert not os.path.exists(backup_sqlite)
-
-    # Run without --skip-backup to create the backup database
-    with pytest.raises((SystemExit, OperationalError)):
-        with patch("sys.argv", ["prog", "migrate", "--repository-type", "sql", tmp_sqlite]):
-            _MigrateCLI.handle_command()
-
-    assert os.path.exists(backup_sqlite)
-
-    # Restore the backup
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "migrate", "--repository-type", "sql", tmp_sqlite, "--restore"]):
-            _MigrateCLI.handle_command()
-    assert f"Restored entities from the backup database '{backup_sqlite}' to '{tmp_sqlite}'." in caplog.text
-    assert not os.path.exists(backup_sqlite)
-
-
-def test_migrate_sql_non_existing_path(caplog):
-    _MigrateCLI.create_parser()
-
-    # Test migrate without providing a path
-    with pytest.raises(SystemExit) as err:
-        with patch("sys.argv", ["prog", "migrate", "--repository-type", "sql"]):
-            _MigrateCLI.handle_command()
-
-    assert err.value.code == 1
-    assert "Missing the required sqlite path." in caplog.text
-
-    caplog.clear()
-
-    # Test migrate with a non-existing-path.sqlite file
-    with pytest.raises(SystemExit) as err:
-        with patch("sys.argv", ["prog", "migrate", "--repository-type", "sql", "non-existing-path.sqlite"]):
-            _MigrateCLI.handle_command()
-    assert err.value.code == 1
-    assert "File 'non-existing-path.sqlite' does not exist." in caplog.text
-
-
 @patch("taipy.core._entity._migrate_cli._migrate_mongo_entities")
 def test_call_to_migrate_mongo(_migrate_mongo_entities_mock):
     _MigrateCLI.create_parser()

+ 0 - 16
tests/core/conftest.py

@@ -27,7 +27,6 @@ from taipy.config.common.scope import Scope
 from taipy.config.config import Config
 from taipy.core._core import Core
 from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
-from taipy.core._repository.db._sql_connection import _SQLConnection
 from taipy.core._version._version import _Version
 from taipy.core._version._version_manager_factory import _VersionManagerFactory
 from taipy.core.config import (
@@ -402,18 +401,3 @@ def init_notifier():
 @pytest.fixture
 def sql_engine():
     return create_engine("sqlite:///:memory:")
-
-
-@pytest.fixture
-def init_sql_repo(tmp_sqlite, init_managers):
-    Config.configure_core(repository_type="sql", repository_properties={"db_location": tmp_sqlite})
-
-    # Clean SQLite database
-    if _SQLConnection._connection:
-        _SQLConnection._connection.close()
-        _SQLConnection._connection = None
-    _SQLConnection.init_db()
-
-    init_managers()
-
-    return tmp_sqlite

+ 0 - 247
tests/core/cycle/test_cycle_manager_with_sql_repo.py

@@ -1,247 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-from datetime import datetime
-
-from taipy.config.common.frequency import Frequency
-from taipy.config.common.scope import Scope
-from taipy.config.config import Config
-from taipy.core.cycle._cycle_manager import _CycleManager
-from taipy.core.cycle._cycle_manager_factory import _CycleManagerFactory
-from taipy.core.cycle.cycle import Cycle
-from taipy.core.cycle.cycle_id import CycleId
-from taipy.core.data._data_manager import _DataManager
-from taipy.core.job._job_manager import _JobManager
-from taipy.core.scenario._scenario_manager import _ScenarioManager
-from taipy.core.scenario._scenario_manager_factory import _ScenarioManagerFactory
-from taipy.core.sequence._sequence_manager import _SequenceManager
-from taipy.core.task._task_manager import _TaskManager
-
-
-def test_save_and_get_cycle_entity(init_sql_repo, cycle, current_datetime):
-    _CycleManager._repository = _CycleManagerFactory._build_repository()
-
-    _CycleManager._delete_all()
-    assert len(_CycleManager._get_all()) == 0
-
-    _CycleManager._set(cycle)
-    assert _CycleManager._exists(cycle.id)
-
-    cycle_1 = _CycleManager._get(cycle.id)
-
-    assert cycle_1.id == cycle.id
-    assert cycle_1.name == cycle.name
-    assert cycle_1.properties == cycle.properties
-    assert cycle_1.creation_date == cycle.creation_date
-    assert cycle_1.start_date == cycle.start_date
-    assert cycle_1.end_date == cycle.end_date
-    assert cycle_1.frequency == cycle.frequency
-
-    assert len(_CycleManager._get_all()) == 1
-    assert _CycleManager._get(cycle.id) == cycle
-    assert _CycleManager._get(cycle.id).name == cycle.name
-    assert isinstance(_CycleManager._get(cycle.id).creation_date, datetime)
-    assert _CycleManager._get(cycle.id).creation_date == cycle.creation_date
-    assert _CycleManager._get(cycle.id).frequency == Frequency.DAILY
-
-    cycle_2_id = CycleId("cycle_2")
-    assert _CycleManager._get(cycle_2_id) is None
-    assert not _CycleManager._exists(cycle_2_id)
-
-    cycle_3 = Cycle(
-        Frequency.MONTHLY,
-        {},
-        creation_date=current_datetime,
-        start_date=current_datetime,
-        end_date=current_datetime,
-        name="bar",
-        id=cycle_1.id,
-    )
-
-    _CycleManager._set(cycle_3)
-
-    cycle_3 = _CycleManager._get(cycle_1.id)
-
-    assert _CycleManager._exists(cycle_1.id)
-    assert len(_CycleManager._get_all()) == 1
-    assert cycle_3.id == cycle_1.id
-    assert cycle_3.name == cycle_3.name
-    assert cycle_3.properties == cycle_3.properties
-    assert cycle_3.creation_date == current_datetime
-    assert cycle_3.start_date == current_datetime
-    assert cycle_3.end_date == current_datetime
-    assert cycle_3.frequency == cycle_3.frequency
-
-
-def test_create_and_delete_cycle_entity(init_sql_repo):
-    _CycleManager._repository = _CycleManagerFactory._build_repository()
-
-    _CycleManager._delete_all()
-    assert len(_CycleManager._get_all()) == 0
-
-    cycle_1 = _CycleManager._create(Frequency.DAILY, name="foo", key="value", display_name="foo")
-
-    assert cycle_1.id is not None
-    assert cycle_1.name == "foo"
-    assert cycle_1.properties == {"key": "value", "display_name": "foo"}
-    assert cycle_1.creation_date is not None
-    assert cycle_1.start_date is not None
-    assert cycle_1.end_date is not None
-    assert cycle_1.start_date < cycle_1.creation_date < cycle_1.end_date
-    assert cycle_1.key == "value"
-    assert cycle_1.frequency == Frequency.DAILY
-
-    cycle_1_id = cycle_1.id
-
-    assert _CycleManager._exists(cycle_1.id)
-    assert len(_CycleManager._get_all()) == 1
-    assert _CycleManager._get(cycle_1_id) == cycle_1
-    assert _CycleManager._get(cycle_1_id).name == "foo"
-    assert isinstance(_CycleManager._get(cycle_1_id).creation_date, datetime)
-    assert _CycleManager._get(cycle_1_id).frequency == Frequency.DAILY
-
-    cycle_2_id = CycleId("cycle_2")
-    assert _CycleManager._get(cycle_2_id) is None
-    assert not _CycleManager._exists(cycle_2_id)
-
-    cycle_3 = _CycleManager._create(Frequency.MONTHLY, "bar")
-
-    assert cycle_3.id is not None
-    assert cycle_3.name == "bar"
-    assert isinstance(cycle_3.creation_date, datetime)
-    assert cycle_3.frequency == Frequency.MONTHLY
-
-    cycle_3_id = cycle_3.id
-
-    assert _CycleManager._exists(cycle_3_id)
-    assert len(_CycleManager._get_all()) == 2
-    assert _CycleManager._get(cycle_3_id).name == "bar"
-
-    cycle_4 = _CycleManager._create(Frequency.YEARLY, "baz")
-    cycle_4_id = cycle_4.id
-
-    assert _CycleManager._exists(cycle_4_id)
-    assert len(_CycleManager._get_all()) == 3
-
-    _CycleManager._delete(cycle_4_id)
-
-    assert len(_CycleManager._get_all()) == 2
-    assert not _CycleManager._exists(cycle_4_id)
-    assert _CycleManager._get(cycle_4_id) is None
-
-    _CycleManager._delete_all()
-    assert len(_CycleManager._get_all()) == 0
-    assert not any(_CycleManager._exists(cycle_id) for cycle_id in [cycle_1_id, cycle_3_id, cycle_4_id])
-
-
-def test_get_cycle_start_date_and_end_date(init_sql_repo):
-    _CycleManager._repository = _CycleManagerFactory._build_repository()
-
-    _CycleManager._delete_all()
-
-    creation_date_1 = datetime.fromisoformat("2021-11-11T11:11:01.000001")
-
-    daily_start_date_1 = _CycleManager._get_start_date_of_cycle(Frequency.DAILY, creation_date=creation_date_1)
-    weekly_start_date_1 = _CycleManager._get_start_date_of_cycle(Frequency.WEEKLY, creation_date=creation_date_1)
-    monthly_start_date_1 = _CycleManager._get_start_date_of_cycle(Frequency.MONTHLY, creation_date=creation_date_1)
-    yearly_start_date_1 = _CycleManager._get_start_date_of_cycle(Frequency.YEARLY, creation_date=creation_date_1)
-
-    assert daily_start_date_1 == datetime.fromisoformat("2021-11-11T00:00:00.000000")
-    assert weekly_start_date_1 == datetime.fromisoformat("2021-11-08T00:00:00.000000")
-    assert monthly_start_date_1 == datetime.fromisoformat("2021-11-01T00:00:00.000000")
-    assert yearly_start_date_1 == datetime.fromisoformat("2021-01-01T00:00:00.000000")
-
-    daily_end_date_1 = _CycleManager._get_end_date_of_cycle(Frequency.DAILY, start_date=daily_start_date_1)
-    weekly_end_date_1 = _CycleManager._get_end_date_of_cycle(Frequency.WEEKLY, start_date=weekly_start_date_1)
-    monthly_end_date_1 = _CycleManager._get_end_date_of_cycle(Frequency.MONTHLY, start_date=monthly_start_date_1)
-    yearly_end_date_1 = _CycleManager._get_end_date_of_cycle(Frequency.YEARLY, start_date=yearly_start_date_1)
-
-    assert daily_end_date_1 == datetime.fromisoformat("2021-11-11T23:59:59.999999")
-    assert weekly_end_date_1 == datetime.fromisoformat("2021-11-14T23:59:59.999999")
-    assert monthly_end_date_1 == datetime.fromisoformat("2021-11-30T23:59:59.999999")
-    assert yearly_end_date_1 == datetime.fromisoformat("2021-12-31T23:59:59.999999")
-
-    creation_date_2 = datetime.now()
-
-    daily_start_date_2 = _CycleManager._get_start_date_of_cycle(Frequency.DAILY, creation_date=creation_date_2)
-    daily_end_date_2 = _CycleManager._get_end_date_of_cycle(Frequency.DAILY, daily_start_date_2)
-    assert daily_start_date_2.date() == creation_date_2.date()
-    assert daily_end_date_2.date() == creation_date_2.date()
-    assert daily_start_date_2 < creation_date_2 < daily_end_date_2
-
-    weekly_start_date_2 = _CycleManager._get_start_date_of_cycle(Frequency.WEEKLY, creation_date=creation_date_2)
-    weekly_end_date_2 = _CycleManager._get_end_date_of_cycle(Frequency.WEEKLY, weekly_start_date_2)
-    assert weekly_start_date_2 < creation_date_2 < weekly_end_date_2
-
-    monthly_start_date_2 = _CycleManager._get_start_date_of_cycle(Frequency.MONTHLY, creation_date=creation_date_2)
-    monthly_end_date_2 = _CycleManager._get_end_date_of_cycle(Frequency.MONTHLY, monthly_start_date_2)
-    assert monthly_start_date_2.month == creation_date_2.month and monthly_start_date_2.day == 1
-    assert monthly_end_date_2.month == creation_date_2.month
-    assert monthly_start_date_2 < creation_date_2 < monthly_end_date_2
-
-    yearly_start_date_2 = _CycleManager._get_start_date_of_cycle(Frequency.YEARLY, creation_date=creation_date_2)
-    yearly_end_date_2 = _CycleManager._get_end_date_of_cycle(Frequency.YEARLY, yearly_start_date_2)
-    assert yearly_start_date_2.year == creation_date_2.year
-    assert yearly_start_date_2 == datetime(creation_date_2.year, 1, 1)
-    assert yearly_end_date_2.year == creation_date_2.year
-    assert yearly_end_date_2.date() == datetime(creation_date_2.year, 12, 31).date()
-    assert yearly_start_date_2 < creation_date_2 < yearly_end_date_2
-
-
-def test_hard_delete_shared_entities(init_sql_repo):
-    _ScenarioManager._repository = _ScenarioManagerFactory._build_repository()
-
-    dn_config_1 = Config.configure_data_node("my_input_1", "in_memory", scope=Scope.SCENARIO, default_data="testing")
-    dn_config_2 = Config.configure_data_node("my_input_2", "in_memory", scope=Scope.SCENARIO, default_data="testing")
-    dn_config_3 = Config.configure_data_node("my_input_3", "in_memory", scope=Scope.CYCLE, default_data="testing")
-    dn_config_4 = Config.configure_data_node("my_input_4", "in_memory", scope=Scope.GLOBAL, default_data="testing")
-    task_config_1 = Config.configure_task("task_config_1", print, dn_config_1, dn_config_2)
-    task_config_2 = Config.configure_task("task_config_2", print, dn_config_2, dn_config_3)
-    task_config_3 = Config.configure_task("task_config_3", print, dn_config_3, dn_config_4)  # scope = global
-    creation_date = datetime.now()
-    scenario_config_1 = Config.configure_scenario(
-        "scenario_config_1",
-        [task_config_1, task_config_2, task_config_3],
-        creation_date=creation_date,
-        frequency=Frequency.DAILY,
-    )
-    scenario_config_1.add_sequences(
-        {
-            "sequence_1": [task_config_1, task_config_2],
-            "sequence_2": [task_config_1, task_config_2],
-            "sequence_3": [task_config_3],
-        }
-    )
-    scenario_config_2 = Config.configure_scenario(
-        "scenario_config_2", [task_config_2, task_config_3]
-    )  # No Frequency so cycle attached to scenarios
-    scenario_config_2.add_sequences({"sequence_3": [task_config_3]})
-
-    scenario_1 = _ScenarioManager._create(scenario_config_1)
-    scenario_2 = _ScenarioManager._create(scenario_config_1)
-    scenario_3 = _ScenarioManager._create(scenario_config_2)
-    scenario_1.submit()
-    scenario_2.submit()
-    scenario_3.submit()
-
-    assert len(_ScenarioManager._get_all()) == 3
-    assert len(_SequenceManager._get_all()) == 7
-    assert len(_TaskManager._get_all()) == 7
-    assert len(_DataManager._get_all()) == 8
-    assert len(_JobManager._get_all()) == 8
-    assert len(_CycleManager._get_all()) == 1
-    _CycleManager._hard_delete(scenario_1.cycle.id)
-    assert len(_CycleManager._get_all()) == 0
-    assert len(_ScenarioManager._get_all()) == 1
-    assert len(_SequenceManager._get_all()) == 1
-    assert len(_TaskManager._get_all()) == 2
-    assert len(_JobManager._get_all()) == 2
-    assert len(_DataManager._get_all()) == 3

+ 19 - 29
tests/core/cycle/test_cycle_repositories.py

@@ -14,15 +14,13 @@ import os
 import pytest
 
 from taipy.core.cycle._cycle_fs_repository import _CycleFSRepository
-from taipy.core.cycle._cycle_sql_repository import _CycleSQLRepository
 from taipy.core.cycle.cycle import Cycle, CycleId
 from taipy.core.exceptions import ModelNotFound
 
 
 class TestCycleRepositories:
-    @pytest.mark.parametrize("repo", [_CycleFSRepository, _CycleSQLRepository])
-    def test_save_and_load(self, cycle: Cycle, repo, init_sql_repo):
-        repository = repo()
+    def test_save_and_load(self, cycle: Cycle):
+        repository = _CycleFSRepository()
         repository._save(cycle)
 
         loaded_cycle = repository._load(cycle.id)
@@ -35,17 +33,15 @@ class TestCycleRepositories:
         assert cycle.id == loaded_cycle.id
         assert cycle._properties == loaded_cycle._properties
 
-    @pytest.mark.parametrize("repo", [_CycleFSRepository, _CycleSQLRepository])
-    def test_exists(self, cycle, repo, init_sql_repo):
-        repository = repo()
+    def test_exists(self, cycle):
+        repository = _CycleFSRepository()
         repository._save(cycle)
 
         assert repository._exists(cycle.id)
         assert not repository._exists("not-existed-cycle")
 
-    @pytest.mark.parametrize("repo", [_CycleFSRepository, _CycleSQLRepository])
-    def test_load_all(self, cycle, repo, init_sql_repo):
-        repository = repo()
+    def test_load_all(self, cycle):
+        repository = _CycleFSRepository()
         for i in range(10):
             cycle.id = CycleId(f"cycle-{i}")
             repository._save(cycle)
@@ -53,9 +49,8 @@ class TestCycleRepositories:
 
         assert len(data_nodes) == 10
 
-    @pytest.mark.parametrize("repo", [_CycleFSRepository, _CycleSQLRepository])
-    def test_load_all_with_filters(self, cycle, repo, init_sql_repo):
-        repository = repo()
+    def test_load_all_with_filters(self, cycle):
+        repository = _CycleFSRepository()
 
         for i in range(10):
             cycle.id = CycleId(f"cycle-{i}")
@@ -65,9 +60,8 @@ class TestCycleRepositories:
 
         assert len(objs) == 1
 
-    @pytest.mark.parametrize("repo", [_CycleSQLRepository])
-    def test_delete(self, cycle, repo, init_sql_repo):
-        repository = repo()
+    def test_delete(self, cycle):
+        repository = _CycleFSRepository()
         repository._save(cycle)
 
         repository._delete(cycle.id)
@@ -75,9 +69,8 @@ class TestCycleRepositories:
         with pytest.raises(ModelNotFound):
             repository._load(cycle.id)
 
-    @pytest.mark.parametrize("repo", [_CycleFSRepository, _CycleSQLRepository])
-    def test_delete_all(self, cycle, repo, init_sql_repo):
-        repository = repo()
+    def test_delete_all(self, cycle):
+        repository = _CycleFSRepository()
 
         for i in range(10):
             cycle.id = CycleId(f"cycle-{i}")
@@ -89,9 +82,8 @@ class TestCycleRepositories:
 
         assert len(repository._load_all()) == 0
 
-    @pytest.mark.parametrize("repo", [_CycleFSRepository, _CycleSQLRepository])
-    def test_delete_many(self, cycle, repo, init_sql_repo):
-        repository = repo()
+    def test_delete_many(self, cycle):
+        repository = _CycleFSRepository()
 
         for i in range(10):
             cycle.id = CycleId(f"cycle-{i}")
@@ -104,9 +96,8 @@ class TestCycleRepositories:
 
         assert len(repository._load_all()) == 7
 
-    @pytest.mark.parametrize("repo", [_CycleFSRepository, _CycleSQLRepository])
-    def test_search(self, cycle, repo, init_sql_repo):
-        repository = repo()
+    def test_search(self, cycle):
+        repository = _CycleFSRepository()
 
         for i in range(10):
             cycle.id = CycleId(f"cycle-{i}")
@@ -119,11 +110,10 @@ class TestCycleRepositories:
         assert len(objs) == 1
         assert isinstance(objs[0], Cycle)
 
-    @pytest.mark.parametrize("repo", [_CycleFSRepository, _CycleSQLRepository])
-    def test_export(self, tmpdir, cycle, repo, init_sql_repo):
-        repository = repo()
+    def test_export(self, tmpdir, cycle):
+        repository = _CycleFSRepository()
         repository._save(cycle)
 
         repository._export(cycle.id, tmpdir.strpath)
-        dir_path = repository.dir_path if repo == _CycleFSRepository else os.path.join(tmpdir.strpath, "cycle")
+        dir_path = repository.dir_path
         assert os.path.exists(os.path.join(dir_path, f"{cycle.id}.json"))

+ 0 - 288
tests/core/data/test_data_manager_with_sql_repo.py

@@ -1,288 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import os
-import pathlib
-
-import pytest
-
-from taipy.config.common.scope import Scope
-from taipy.config.config import Config
-from taipy.core._version._version_manager import _VersionManager
-from taipy.core.config.data_node_config import DataNodeConfig
-from taipy.core.data._data_manager import _DataManager
-from taipy.core.data.csv import CSVDataNode
-from taipy.core.data.data_node_id import DataNodeId
-from taipy.core.data.in_memory import InMemoryDataNode
-from taipy.core.exceptions.exceptions import InvalidDataNodeType, ModelNotFound
-
-
-def file_exists(file_path: str) -> bool:
-    return os.path.exists(file_path)
-
-
-class TestDataManager:
-    def test_create_data_node_and_modify_properties_does_not_modify_config(self, init_sql_repo):
-        dn_config = Config.configure_data_node(id="name", foo="bar")
-        dn = _DataManager._create_and_set(dn_config, None, None)
-        assert dn_config.properties.get("foo") == "bar"
-        assert dn_config.properties.get("baz") is None
-
-        dn.properties["baz"] = "qux"
-        _DataManager._set(dn)
-        assert dn_config.properties.get("foo") == "bar"
-        assert dn_config.properties.get("baz") is None
-        assert dn.properties.get("foo") == "bar"
-        assert dn.properties.get("baz") == "qux"
-
-    def test_create_raises_exception_with_wrong_type(self, init_sql_repo):
-        wrong_type_dn_config = DataNodeConfig(id="foo", storage_type="bar", scope=DataNodeConfig._DEFAULT_SCOPE)
-        with pytest.raises(InvalidDataNodeType):
-            _DataManager._create_and_set(wrong_type_dn_config, None, None)
-
-    def test_create_from_same_config_generates_new_data_node_and_new_id(self, init_sql_repo):
-        dn_config = Config.configure_data_node(id="foo", storage_type="in_memory")
-        dn = _DataManager._create_and_set(dn_config, None, None)
-        dn_2 = _DataManager._create_and_set(dn_config, None, None)
-        assert dn_2.id != dn.id
-
-    def test_create_uses_overridden_attributes_in_config_file(self, init_sql_repo):
-        Config.override(os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/config.toml"))
-
-        csv_dn_cfg = Config.configure_data_node(id="foo", storage_type="csv", path="bar", has_header=True)
-        csv_dn = _DataManager._create_and_set(csv_dn_cfg, None, None)
-        assert csv_dn.config_id == "foo"
-        assert isinstance(csv_dn, CSVDataNode)
-        assert csv_dn._path == "path_from_config_file"
-        assert csv_dn.has_header
-
-        csv_dn_cfg = Config.configure_data_node(id="baz", storage_type="csv", path="bar", has_header=True)
-        csv_dn = _DataManager._create_and_set(csv_dn_cfg, None, None)
-        assert csv_dn.config_id == "baz"
-        assert isinstance(csv_dn, CSVDataNode)
-        assert csv_dn._path == "bar"
-        assert csv_dn.has_header
-
-    def test_get_if_not_exists(self, init_sql_repo):
-        with pytest.raises(ModelNotFound):
-            _DataManager._repository._load("test_data_node_2")
-
-    def test_get_all(self, init_sql_repo):
-        _DataManager._delete_all()
-        assert len(_DataManager._get_all()) == 0
-        dn_config_1 = Config.configure_data_node(id="foo", storage_type="in_memory")
-        _DataManager._create_and_set(dn_config_1, None, None)
-        assert len(_DataManager._get_all()) == 1
-        dn_config_2 = Config.configure_data_node(id="baz", storage_type="in_memory")
-        _DataManager._create_and_set(dn_config_2, None, None)
-        _DataManager._create_and_set(dn_config_2, None, None)
-        assert len(_DataManager._get_all()) == 3
-        assert len([dn for dn in _DataManager._get_all() if dn.config_id == "foo"]) == 1
-        assert len([dn for dn in _DataManager._get_all() if dn.config_id == "baz"]) == 2
-
-    def test_get_all_on_multiple_versions_environment(self, init_sql_repo):
-        # Create 5 data nodes with 2 versions each
-        # Only version 1.0 has the data node with config_id = "config_id_1"
-        # Only version 2.0 has the data node with config_id = "config_id_6"
-        for version in range(1, 3):
-            for i in range(5):
-                _DataManager._set(
-                    InMemoryDataNode(
-                        f"config_id_{i+version}",
-                        Scope.SCENARIO,
-                        id=DataNodeId(f"id{i}_v{version}"),
-                        version=f"{version}.0",
-                    )
-                )
-
-        _VersionManager._set_experiment_version("1.0")
-        assert len(_DataManager._get_all()) == 5
-        assert len(_DataManager._get_all_by(filters=[{"version": "1.0", "config_id": "config_id_1"}])) == 1
-        assert len(_DataManager._get_all_by(filters=[{"version": "1.0", "config_id": "config_id_6"}])) == 0
-
-        _VersionManager._set_development_version("1.0")
-        assert len(_DataManager._get_all()) == 5
-        assert len(_DataManager._get_all_by(filters=[{"version": "1.0", "config_id": "config_id_1"}])) == 1
-        assert len(_DataManager._get_all_by(filters=[{"version": "1.0", "config_id": "config_id_6"}])) == 0
-
-        _VersionManager._set_experiment_version("2.0")
-        assert len(_DataManager._get_all()) == 5
-        assert len(_DataManager._get_all_by(filters=[{"version": "2.0", "config_id": "config_id_1"}])) == 0
-        assert len(_DataManager._get_all_by(filters=[{"version": "2.0", "config_id": "config_id_6"}])) == 1
-
-        _VersionManager._set_development_version("2.0")
-        assert len(_DataManager._get_all()) == 5
-        assert len(_DataManager._get_all_by(filters=[{"version": "2.0", "config_id": "config_id_1"}])) == 0
-        assert len(_DataManager._get_all_by(filters=[{"version": "2.0", "config_id": "config_id_6"}])) == 1
-
-    def test_set(self, init_sql_repo):
-        dn = InMemoryDataNode(
-            "config_id",
-            Scope.SCENARIO,
-            id=DataNodeId("id"),
-            owner_id=None,
-            parent_ids={"task_id_1"},
-            last_edit_date=None,
-            edits=[],
-            edit_in_progress=False,
-            properties={"foo": "bar"},
-        )
-        assert len(_DataManager._get_all()) == 0
-        assert not _DataManager._exists(dn.id)
-        _DataManager._set(dn)
-        assert len(_DataManager._get_all()) == 1
-        assert _DataManager._exists(dn.id)
-
-        # changing data node attribute
-        dn._config_id = "foo"
-        assert dn.config_id == "foo"
-        _DataManager._set(dn)
-        assert len(_DataManager._get_all()) == 1
-        assert dn.config_id == "foo"
-        assert _DataManager._get(dn.id).config_id == "foo"
-
-    def test_delete(self, init_sql_repo):
-        _DataManager._delete_all()
-
-        dn_1 = InMemoryDataNode("config_id", Scope.SCENARIO, id="id_1")
-        dn_2 = InMemoryDataNode("config_id", Scope.SCENARIO, id="id_2")
-        dn_3 = InMemoryDataNode("config_id", Scope.SCENARIO, id="id_3")
-        assert len(_DataManager._get_all()) == 0
-        _DataManager._set(dn_1)
-        _DataManager._set(dn_2)
-        _DataManager._set(dn_3)
-        assert len(_DataManager._get_all()) == 3
-        assert all(_DataManager._exists(dn.id) for dn in [dn_1, dn_2, dn_3])
-        _DataManager._delete(dn_1.id)
-        assert len(_DataManager._get_all()) == 2
-        assert _DataManager._get(dn_2.id).id == dn_2.id
-        assert _DataManager._get(dn_3.id).id == dn_3.id
-        assert _DataManager._get(dn_1.id) is None
-        assert all(_DataManager._exists(dn.id) for dn in [dn_2, dn_3])
-        assert not _DataManager._exists(dn_1.id)
-        _DataManager._delete_all()
-        assert len(_DataManager._get_all()) == 0
-        assert not any(_DataManager._exists(dn.id) for dn in [dn_2, dn_3])
-
-    def test_get_or_create(self, init_sql_repo):
-        def _get_or_create_dn(config, *args):
-            return _DataManager._bulk_get_or_create([config], *args)[config]
-
-        global_dn_config = Config.configure_data_node(
-            id="test_data_node", storage_type="in_memory", scope=Scope.GLOBAL, data="In memory Data Node"
-        )
-        cycle_dn_config = Config.configure_data_node(
-            id="test_data_node1", storage_type="in_memory", scope=Scope.CYCLE, data="In memory scenario"
-        )
-        scenario_dn_config = Config.configure_data_node(
-            id="test_data_node2", storage_type="in_memory", scope=Scope.SCENARIO, data="In memory scenario"
-        )
-
-        _DataManager._delete_all()
-
-        assert len(_DataManager._get_all()) == 0
-        global_dn = _get_or_create_dn(global_dn_config, None, None)
-        assert len(_DataManager._get_all()) == 1
-        global_dn_bis = _get_or_create_dn(global_dn_config, None)
-        assert len(_DataManager._get_all()) == 1
-        assert global_dn.id == global_dn_bis.id
-
-        scenario_dn = _get_or_create_dn(scenario_dn_config, None, "scenario_id")
-        assert len(_DataManager._get_all()) == 2
-        scenario_dn_bis = _get_or_create_dn(scenario_dn_config, None, "scenario_id")
-        assert len(_DataManager._get_all()) == 2
-        assert scenario_dn.id == scenario_dn_bis.id
-        scenario_dn_ter = _get_or_create_dn(scenario_dn_config, None, "scenario_id")
-        assert len(_DataManager._get_all()) == 2
-        assert scenario_dn.id == scenario_dn_bis.id
-        assert scenario_dn_bis.id == scenario_dn_ter.id
-        scenario_dn_quater = _get_or_create_dn(scenario_dn_config, None, "scenario_id_2")
-        assert len(_DataManager._get_all()) == 3
-        assert scenario_dn.id == scenario_dn_bis.id
-        assert scenario_dn_bis.id == scenario_dn_ter.id
-        assert scenario_dn_ter.id != scenario_dn_quater.id
-
-        assert len(_DataManager._get_all()) == 3
-        cycle_dn = _get_or_create_dn(cycle_dn_config, "cycle_id", None)
-        assert len(_DataManager._get_all()) == 4
-        cycle_dn_1 = _get_or_create_dn(cycle_dn_config, "cycle_id", None)
-        assert len(_DataManager._get_all()) == 4
-        assert cycle_dn.id == cycle_dn_1.id
-        cycle_dn_2 = _get_or_create_dn(cycle_dn_config, "cycle_id", "scenario_id")
-        assert len(_DataManager._get_all()) == 4
-        assert cycle_dn.id == cycle_dn_2.id
-        cycle_dn_3 = _get_or_create_dn(cycle_dn_config, "cycle_id", None)
-        assert len(_DataManager._get_all()) == 4
-        assert cycle_dn.id == cycle_dn_3.id
-        cycle_dn_4 = _get_or_create_dn(cycle_dn_config, "cycle_id", "scenario_id")
-        assert len(_DataManager._get_all()) == 4
-        assert cycle_dn.id == cycle_dn_4.id
-        cycle_dn_5 = _get_or_create_dn(cycle_dn_config, "cycle_id", "scenario_id_2")
-        assert len(_DataManager._get_all()) == 4
-        assert cycle_dn.id == cycle_dn_5.id
-
-        assert cycle_dn_1.id == cycle_dn_2.id
-        assert cycle_dn_2.id == cycle_dn_3.id
-        assert cycle_dn_3.id == cycle_dn_4.id
-        assert cycle_dn_4.id == cycle_dn_5.id
-
-    def test_get_data_nodes_by_config_id(self, init_sql_repo):
-        dn_config_1 = Config.configure_data_node("dn_1", scope=Scope.SCENARIO)
-        dn_config_2 = Config.configure_data_node("dn_2", scope=Scope.SCENARIO)
-        dn_config_3 = Config.configure_data_node("dn_3", scope=Scope.SCENARIO)
-
-        dn_1_1 = _DataManager._create_and_set(dn_config_1, None, None)
-        dn_1_2 = _DataManager._create_and_set(dn_config_1, None, None)
-        dn_1_3 = _DataManager._create_and_set(dn_config_1, None, None)
-        assert len(_DataManager._get_all()) == 3
-
-        dn_2_1 = _DataManager._create_and_set(dn_config_2, None, None)
-        dn_2_2 = _DataManager._create_and_set(dn_config_2, None, None)
-        assert len(_DataManager._get_all()) == 5
-
-        dn_3_1 = _DataManager._create_and_set(dn_config_3, None, None)
-        assert len(_DataManager._get_all()) == 6
-
-        dn_1_datanodes = _DataManager._get_by_config_id(dn_config_1.id)
-        assert len(dn_1_datanodes) == 3
-        assert sorted([dn_1_1.id, dn_1_2.id, dn_1_3.id]) == sorted([sequence.id for sequence in dn_1_datanodes])
-
-        dn_2_datanodes = _DataManager._get_by_config_id(dn_config_2.id)
-        assert len(dn_2_datanodes) == 2
-        assert sorted([dn_2_1.id, dn_2_2.id]) == sorted([sequence.id for sequence in dn_2_datanodes])
-
-        dn_3_datanodes = _DataManager._get_by_config_id(dn_config_3.id)
-        assert len(dn_3_datanodes) == 1
-        assert sorted([dn_3_1.id]) == sorted([sequence.id for sequence in dn_3_datanodes])
-
-    def test_get_data_nodes_by_config_id_in_multiple_versions_environment(self, init_sql_repo):
-        dn_config_1 = Config.configure_data_node("dn_1", scope=Scope.SCENARIO)
-        dn_config_2 = Config.configure_data_node("dn_2", scope=Scope.SCENARIO)
-
-        _VersionManager._set_experiment_version("1.0")
-        _DataManager._create_and_set(dn_config_1, None, None)
-        _DataManager._create_and_set(dn_config_1, None, None)
-        _DataManager._create_and_set(dn_config_1, None, None)
-        _DataManager._create_and_set(dn_config_2, None, None)
-        _DataManager._create_and_set(dn_config_2, None, None)
-
-        assert len(_DataManager._get_by_config_id(dn_config_1.id)) == 3
-        assert len(_DataManager._get_by_config_id(dn_config_2.id)) == 2
-
-        _VersionManager._set_experiment_version("2.0")
-        _DataManager._create_and_set(dn_config_1, None, None)
-        _DataManager._create_and_set(dn_config_1, None, None)
-        _DataManager._create_and_set(dn_config_1, None, None)
-        _DataManager._create_and_set(dn_config_2, None, None)
-        _DataManager._create_and_set(dn_config_2, None, None)
-
-        assert len(_DataManager._get_by_config_id(dn_config_1.id)) == 3
-        assert len(_DataManager._get_by_config_id(dn_config_2.id)) == 2

+ 20 - 21
tests/core/data/test_data_repositories.py

@@ -14,14 +14,13 @@ import os
 import pytest
 
 from taipy.core.data._data_fs_repository import _DataFSRepository
-from taipy.core.data._data_sql_repository import _DataSQLRepository
 from taipy.core.data.data_node import DataNode, DataNodeId
 from taipy.core.exceptions import ModelNotFound
 
 
 class TestDataNodeRepository:
-    @pytest.mark.parametrize("repo", [_DataFSRepository, _DataSQLRepository])
-    def test_save_and_load(self, data_node: DataNode, repo, init_sql_repo):
+    @pytest.mark.parametrize("repo", [_DataFSRepository])
+    def test_save_and_load(self, data_node: DataNode, repo):
         repository = repo()
         repository._save(data_node)
 
@@ -41,16 +40,16 @@ class TestDataNodeRepository:
         assert data_node._edits == loaded_data_node._edits
         assert data_node._properties == loaded_data_node._properties
 
-    @pytest.mark.parametrize("repo", [_DataFSRepository, _DataSQLRepository])
-    def test_exists(self, data_node, repo, init_sql_repo):
+    @pytest.mark.parametrize("repo", [_DataFSRepository])
+    def test_exists(self, data_node, repo):
         repository = repo()
         repository._save(data_node)
 
         assert repository._exists(data_node.id)
         assert not repository._exists("not-existed-data-node")
 
-    @pytest.mark.parametrize("repo", [_DataFSRepository, _DataSQLRepository])
-    def test_load_all(self, data_node, repo, init_sql_repo):
+    @pytest.mark.parametrize("repo", [_DataFSRepository])
+    def test_load_all(self, data_node, repo):
         repository = repo()
         for i in range(10):
             data_node.id = DataNodeId(f"data_node-{i}")
@@ -59,8 +58,8 @@ class TestDataNodeRepository:
 
         assert len(data_nodes) == 10
 
-    @pytest.mark.parametrize("repo", [_DataFSRepository, _DataSQLRepository])
-    def test_load_all_with_filters(self, data_node, repo, init_sql_repo):
+    @pytest.mark.parametrize("repo", [_DataFSRepository])
+    def test_load_all_with_filters(self, data_node, repo):
         repository = repo()
 
         for i in range(10):
@@ -71,8 +70,8 @@ class TestDataNodeRepository:
 
         assert len(objs) == 1
 
-    @pytest.mark.parametrize("repo", [_DataFSRepository, _DataSQLRepository])
-    def test_delete(self, data_node, repo, init_sql_repo):
+    @pytest.mark.parametrize("repo", [_DataFSRepository])
+    def test_delete(self, data_node, repo):
         repository = repo()
         repository._save(data_node)
 
@@ -81,8 +80,8 @@ class TestDataNodeRepository:
         with pytest.raises(ModelNotFound):
             repository._load(data_node.id)
 
-    @pytest.mark.parametrize("repo", [_DataFSRepository, _DataSQLRepository])
-    def test_delete_all(self, data_node, repo, init_sql_repo):
+    @pytest.mark.parametrize("repo", [_DataFSRepository])
+    def test_delete_all(self, data_node, repo):
         repository = repo()
 
         for i in range(10):
@@ -95,8 +94,8 @@ class TestDataNodeRepository:
 
         assert len(repository._load_all()) == 0
 
-    @pytest.mark.parametrize("repo", [_DataFSRepository, _DataSQLRepository])
-    def test_delete_many(self, data_node, repo, init_sql_repo):
+    @pytest.mark.parametrize("repo", [_DataFSRepository])
+    def test_delete_many(self, data_node, repo):
         repository = repo()
 
         for i in range(10):
@@ -110,8 +109,8 @@ class TestDataNodeRepository:
 
         assert len(repository._load_all()) == 7
 
-    @pytest.mark.parametrize("repo", [_DataFSRepository, _DataSQLRepository])
-    def test_delete_by(self, data_node, repo, init_sql_repo):
+    @pytest.mark.parametrize("repo", [_DataFSRepository])
+    def test_delete_by(self, data_node, repo):
         repository = repo()
 
         # Create 5 entities with version 1.0 and 5 entities with version 2.0
@@ -126,8 +125,8 @@ class TestDataNodeRepository:
 
         assert len(repository._load_all()) == 5
 
-    @pytest.mark.parametrize("repo", [_DataFSRepository, _DataSQLRepository])
-    def test_search(self, data_node, repo, init_sql_repo):
+    @pytest.mark.parametrize("repo", [_DataFSRepository])
+    def test_search(self, data_node, repo):
         repository = repo()
 
         for i in range(10):
@@ -147,8 +146,8 @@ class TestDataNodeRepository:
 
         assert repository._search("owner_id", "task-2", filters=[{"version": "non_existed_version"}]) == []
 
-    @pytest.mark.parametrize("repo", [_DataFSRepository, _DataSQLRepository])
-    def test_export(self, tmpdir, data_node, repo, init_sql_repo):
+    @pytest.mark.parametrize("repo", [_DataFSRepository])
+    def test_export(self, tmpdir, data_node, repo):
         repository = repo()
         repository._save(data_node)
 

+ 0 - 255
tests/core/job/test_job_manager_with_sql_repo.py

@@ -1,255 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import multiprocessing
-import random
-import string
-from functools import partial
-from time import sleep
-from typing import cast
-
-import pytest
-
-from taipy.config.common.scope import Scope
-from taipy.config.config import Config
-from taipy.core import Task
-from taipy.core._orchestrator._dispatcher import _StandaloneJobDispatcher
-from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
-from taipy.core.config.job_config import JobConfig
-from taipy.core.data import InMemoryDataNode
-from taipy.core.data._data_manager import _DataManager
-from taipy.core.data._data_manager_factory import _DataManagerFactory
-from taipy.core.exceptions.exceptions import JobNotDeletedException
-from taipy.core.job._job_manager import _JobManager
-from taipy.core.job.job_id import JobId
-from taipy.core.job.status import Status
-from taipy.core.task._task_manager import _TaskManager
-from tests.core.utils import assert_true_after_time
-
-
-def multiply(nb1: float, nb2: float):
-    return nb1 * nb2
-
-
-def lock_multiply(lock, nb1: float, nb2: float):
-    with lock:
-        return multiply(nb1 or 1, nb2 or 2)
-
-
-def test_create_jobs(init_sql_repo):
-    Config.configure_job_executions(mode=JobConfig._DEVELOPMENT_MODE)
-
-    task = _create_task(multiply, name="get_job")
-
-    job_1 = _JobManager._create(task, [print], "submit_id", "secnario_id", True)
-    assert _JobManager._get(job_1.id) == job_1
-    assert job_1.is_submitted()
-    assert task.config_id in job_1.id
-    assert job_1.task.id == task.id
-    assert job_1.submit_id == "submit_id"
-    assert job_1.submit_entity_id == "secnario_id"
-    assert job_1.force
-
-    job_2 = _JobManager._create(task, [print], "submit_id_1", "secnario_id", False)
-    assert _JobManager._get(job_2.id) == job_2
-    assert job_2.is_submitted()
-    assert task.config_id in job_2.id
-    assert job_2.task.id == task.id
-    assert job_2.submit_id == "submit_id_1"
-    assert job_2.submit_entity_id == "secnario_id"
-    assert not job_2.force
-
-
-def test_get_job(init_sql_repo):
-    Config.configure_job_executions(mode=JobConfig._DEVELOPMENT_MODE)
-
-    task = _create_task(multiply, name="get_job")
-
-    job_1 = _OrchestratorFactory._orchestrator.submit_task(task).jobs[0]
-    assert _JobManager._get(job_1.id) == job_1
-    assert _JobManager._get(job_1.id).submit_entity_id == task.id
-
-    job_2 = _OrchestratorFactory._orchestrator.submit_task(task).jobs[0]
-    assert job_1 != job_2
-    assert _JobManager._get(job_1.id).id == job_1.id
-    assert _JobManager._get(job_2.id).id == job_2.id
-    assert _JobManager._get(job_2.id).submit_entity_id == task.id
-
-
-def test_get_latest_job(init_sql_repo):
-    Config.configure_job_executions(mode=JobConfig._DEVELOPMENT_MODE)
-
-    task = _create_task(multiply, name="get_latest_job")
-    task_2 = _create_task(multiply, name="get_latest_job_2")
-
-    job_1 = _OrchestratorFactory._orchestrator.submit_task(task).jobs[0]
-    assert _JobManager._get_latest(task) == job_1
-    assert _JobManager._get_latest(task_2) is None
-
-    sleep(0.01)  # Comparison is based on time, precision on Windows is not enough important
-    job_2 = _OrchestratorFactory._orchestrator.submit_task(task_2).jobs[0]
-    assert _JobManager._get_latest(task).id == job_1.id
-    assert _JobManager._get_latest(task_2).id == job_2.id
-
-    sleep(0.01)  # Comparison is based on time, precision on Windows is not enough important
-    job_1_bis = _OrchestratorFactory._orchestrator.submit_task(task).jobs[0]
-    assert _JobManager._get_latest(task).id == job_1_bis.id
-    assert _JobManager._get_latest(task_2).id == job_2.id
-
-
-def test_get_job_unknown(init_sql_repo):
-    assert _JobManager._get(JobId("Unknown")) is None
-
-
-def test_get_jobs(init_sql_repo):
-    Config.configure_job_executions(mode=JobConfig._DEVELOPMENT_MODE)
-
-    task = _create_task(multiply, name="get_all_jobs")
-
-    job_1 = _OrchestratorFactory._orchestrator.submit_task(task).jobs[0]
-    job_2 = _OrchestratorFactory._orchestrator.submit_task(task).jobs[0]
-
-    assert {job.id for job in _JobManager._get_all()} == {job_1.id, job_2.id}
-
-
-def test_delete_job(init_sql_repo):
-    Config.configure_job_executions(mode=JobConfig._DEVELOPMENT_MODE)
-
-    task = _create_task(multiply, name="delete_job")
-
-    job_1 = _OrchestratorFactory._orchestrator.submit_task(task).jobs[0]
-    job_2 = _OrchestratorFactory._orchestrator.submit_task(task).jobs[0]
-
-    _JobManager._delete(job_1)
-
-    assert [job.id for job in _JobManager._get_all()] == [job_2.id]
-    assert _JobManager._get(job_1.id) is None
-
-
-def test_raise_when_trying_to_delete_unfinished_job(init_sql_repo):
-    Config.configure_job_executions(mode=JobConfig._STANDALONE_MODE, max_nb_of_workers=3)
-
-    dnm = _DataManagerFactory._build_manager()
-    dn_1 = InMemoryDataNode("dn_config_1", Scope.SCENARIO, properties={"default_data": 1})
-    dnm._set(dn_1)
-    dn_2 = InMemoryDataNode("dn_config_2", Scope.SCENARIO, properties={"default_data": 2})
-    dnm._set(dn_2)
-    dn_3 = InMemoryDataNode("dn_config_3", Scope.SCENARIO)
-    dnm._set(dn_3)
-    proc_manager = multiprocessing.Manager()
-    lock = proc_manager.Lock()
-    task = Task("task_cfg", {}, partial(lock_multiply, lock), [dn_1, dn_2], [dn_3], id="raise_when_delete_unfinished")
-    dispatcher = cast(_StandaloneJobDispatcher, _OrchestratorFactory._build_dispatcher(force_restart=True))
-
-    with lock:
-        job = _OrchestratorFactory._orchestrator.submit_task(task)._jobs[0]
-        assert_true_after_time(job.is_running)
-        assert dispatcher._nb_available_workers == 2
-        with pytest.raises(JobNotDeletedException):
-            _JobManager._delete(job)
-        with pytest.raises(JobNotDeletedException):
-            _JobManager._delete(job, force=False)
-    assert_true_after_time(job.is_completed)
-    _JobManager._delete(job)
-
-
-def test_force_deleting_unfinished_job(init_sql_repo):
-    Config.configure_job_executions(mode=JobConfig._STANDALONE_MODE, max_nb_of_workers=2)
-
-    m = multiprocessing.Manager()
-    lock = m.Lock()
-    dnm = _DataManagerFactory._build_manager()
-    dn_1 = InMemoryDataNode("dn_config_1", Scope.SCENARIO, properties={"default_data": 1})
-    dnm._set(dn_1)
-    dn_2 = InMemoryDataNode("dn_config_2", Scope.SCENARIO, properties={"default_data": 2})
-    dnm._set(dn_2)
-    dn_3 = InMemoryDataNode("dn_config_3", Scope.SCENARIO)
-    dnm._set(dn_3)
-    task_1 = Task(
-        "task_config_1", {}, partial(lock_multiply, lock), [dn_1, dn_2], [dn_3], id="delete_force_unfinished_job"
-    )
-    reference_last_edit_date = dn_3.last_edit_date
-    _OrchestratorFactory._build_dispatcher()
-    with lock:
-        job = _OrchestratorFactory._orchestrator.submit_task(task_1)._jobs[0]
-        assert_true_after_time(job.is_running)
-        with pytest.raises(JobNotDeletedException):
-            _JobManager._delete(job, force=False)
-        _JobManager._delete(job, force=True)
-    assert _JobManager._get(job.id) is None
-    assert_true_after_time(lambda: reference_last_edit_date != dn_3.last_edit_date)
-
-
-def test_is_deletable(init_sql_repo):
-    assert len(_JobManager._get_all()) == 0
-    task = _create_task(print, 0, "task")
-    job = _OrchestratorFactory._orchestrator.submit_task(task).jobs[0]
-
-    assert job.is_completed()
-    assert _JobManager._is_deletable(job)
-    assert _JobManager._is_deletable(job.id)
-
-    job.abandoned()
-    assert job.is_abandoned()
-    assert _JobManager._is_deletable(job)
-    assert _JobManager._is_deletable(job.id)
-
-    job.canceled()
-    assert job.is_canceled()
-    assert _JobManager._is_deletable(job)
-    assert _JobManager._is_deletable(job.id)
-
-    job.failed()
-    assert job.is_failed()
-    assert _JobManager._is_deletable(job)
-    assert _JobManager._is_deletable(job.id)
-
-    job.skipped()
-    assert job.is_skipped()
-    assert _JobManager._is_deletable(job)
-    assert _JobManager._is_deletable(job.id)
-
-    job.blocked()
-    assert job.is_blocked()
-    assert not _JobManager._is_deletable(job)
-    assert not _JobManager._is_deletable(job.id)
-
-    job.running()
-    assert job.is_running()
-    assert not _JobManager._is_deletable(job)
-    assert not _JobManager._is_deletable(job.id)
-
-    job.pending()
-    assert job.is_pending()
-    assert not _JobManager._is_deletable(job)
-    assert not _JobManager._is_deletable(job.id)
-
-    job.status = Status.SUBMITTED
-    assert job.is_submitted()
-    assert not _JobManager._is_deletable(job)
-    assert not _JobManager._is_deletable(job.id)
-
-
-def _create_task(function, nb_outputs=1, name=None):
-    input1_dn_config = Config.configure_data_node("input1", scope=Scope.SCENARIO, default_data=21)
-    input2_dn_config = Config.configure_data_node("input2", scope=Scope.SCENARIO, default_data=2)
-    output_dn_configs = [
-        Config.configure_data_node(f"output{i}", scope=Scope.SCENARIO, default_data=0) for i in range(nb_outputs)
-    ]
-    _DataManager._bulk_get_or_create(output_dn_configs)
-    name = name or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
-    task_config = Config.configure_task(
-        id=name,
-        function=function,
-        input=[input1_dn_config, input2_dn_config],
-        output=output_dn_configs,
-    )
-    return _TaskManager._bulk_get_or_create([task_config])[0]

+ 41 - 52
tests/core/job/test_job_repositories.py

@@ -13,48 +13,44 @@ import os
 
 import pytest
 
-from taipy.core.data._data_sql_repository import _DataSQLRepository
+from taipy.core.data._data_fs_repository import _DataFSRepository
 from taipy.core.exceptions import ModelNotFound
 from taipy.core.job._job_fs_repository import _JobFSRepository
-from taipy.core.job._job_sql_repository import _JobSQLRepository
 from taipy.core.job.job import Job, JobId
-from taipy.core.task._task_sql_repository import _TaskSQLRepository
+from taipy.core.task._task_fs_repository import _TaskFSRepository
 from taipy.core.task.task import Task
 
 
 class TestJobRepository:
-    @pytest.mark.parametrize("repo", [_JobFSRepository, _JobSQLRepository])
-    def test_save_and_load(self, data_node, job, repo, init_sql_repo):
-        _DataSQLRepository()._save(data_node)
+    def test_save_and_load(self, data_node, job):
+        _DataFSRepository()._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
-        _TaskSQLRepository()._save(task)
+        _TaskFSRepository()._save(task)
         job._task = task
 
-        repository = repo()
+        repository = _JobFSRepository()
         repository._save(job)
 
         obj = repository._load(job.id)
         assert isinstance(obj, Job)
 
-    @pytest.mark.parametrize("repo", [_JobFSRepository, _JobSQLRepository])
-    def test_exists(self, data_node, job, repo, init_sql_repo):
-        _DataSQLRepository()._save(data_node)
+    def test_exists(self, data_node, job):
+        _DataFSRepository()._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
-        _TaskSQLRepository()._save(task)
+        _TaskFSRepository()._save(task)
         job._task = task
-        repository = repo()
+        repository = _JobFSRepository()
         repository._save(job)
 
         assert repository._exists(job.id)
         assert not repository._exists("not-existed-job")
 
-    @pytest.mark.parametrize("repo", [_JobFSRepository, _JobSQLRepository])
-    def test_load_all(self, data_node, job, repo, init_sql_repo):
-        _DataSQLRepository()._save(data_node)
+    def test_load_all(self, data_node, job):
+        _DataFSRepository()._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
-        _TaskSQLRepository()._save(task)
+        _TaskFSRepository()._save(task)
         job._task = task
-        repository = repo()
+        repository = _JobFSRepository()
         for i in range(10):
             job.id = JobId(f"job-{i}")
             repository._save(job)
@@ -62,12 +58,11 @@ class TestJobRepository:
 
         assert len(jobs) == 10
 
-    @pytest.mark.parametrize("repo", [_JobFSRepository, _JobSQLRepository])
-    def test_load_all_with_filters(self, data_node, job, repo, init_sql_repo):
-        repository = repo()
-        _DataSQLRepository()._save(data_node)
+    def test_load_all_with_filters(self, data_node, job):
+        repository = _JobFSRepository()
+        _DataFSRepository()._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
-        _TaskSQLRepository()._save(task)
+        _TaskFSRepository()._save(task)
         job._task = task
 
         for i in range(10):
@@ -77,12 +72,11 @@ class TestJobRepository:
 
         assert len(objs) == 1
 
-    @pytest.mark.parametrize("repo", [_JobFSRepository, _JobSQLRepository])
-    def test_delete(self, data_node, job, repo, init_sql_repo):
-        repository = repo()
-        _DataSQLRepository()._save(data_node)
+    def test_delete(self, data_node, job):
+        repository = _JobFSRepository()
+        _DataFSRepository()._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
-        _TaskSQLRepository()._save(task)
+        _TaskFSRepository()._save(task)
         job._task = task
         repository._save(job)
 
@@ -91,12 +85,11 @@ class TestJobRepository:
         with pytest.raises(ModelNotFound):
             repository._load(job.id)
 
-    @pytest.mark.parametrize("repo", [_JobFSRepository, _JobSQLRepository])
-    def test_delete_all(self, data_node, job, repo, init_sql_repo):
-        repository = repo()
-        _DataSQLRepository()._save(data_node)
+    def test_delete_all(self, data_node, job):
+        repository = _JobFSRepository()
+        _DataFSRepository()._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
-        _TaskSQLRepository()._save(task)
+        _TaskFSRepository()._save(task)
         job._task = task
 
         for i in range(10):
@@ -109,12 +102,11 @@ class TestJobRepository:
 
         assert len(repository._load_all()) == 0
 
-    @pytest.mark.parametrize("repo", [_JobFSRepository, _JobSQLRepository])
-    def test_delete_many(self, data_node, job, repo, init_sql_repo):
-        repository = repo()
-        _DataSQLRepository()._save(data_node)
+    def test_delete_many(self, data_node, job):
+        repository = _JobFSRepository()
+        _DataFSRepository()._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
-        _TaskSQLRepository()._save(task)
+        _TaskFSRepository()._save(task)
         job._task = task
 
         for i in range(10):
@@ -128,12 +120,11 @@ class TestJobRepository:
 
         assert len(repository._load_all()) == 7
 
-    @pytest.mark.parametrize("repo", [_JobFSRepository, _JobSQLRepository])
-    def test_delete_by(self, data_node, job, repo, init_sql_repo):
-        repository = repo()
-        _DataSQLRepository()._save(data_node)
+    def test_delete_by(self, data_node, job):
+        repository = _JobFSRepository()
+        _DataFSRepository()._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
-        _TaskSQLRepository()._save(task)
+        _TaskFSRepository()._save(task)
         job._task = task
 
         # Create 5 entities with version 1.0 and 5 entities with version 2.0
@@ -148,12 +139,11 @@ class TestJobRepository:
 
         assert len(repository._load_all()) == 5
 
-    @pytest.mark.parametrize("repo", [_JobFSRepository, _JobSQLRepository])
-    def test_search(self, data_node, job, repo, init_sql_repo):
-        repository = repo()
-        _DataSQLRepository()._save(data_node)
+    def test_search(self, data_node, job):
+        repository = _JobFSRepository()
+        _DataFSRepository()._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
-        _TaskSQLRepository()._save(task)
+        _TaskFSRepository()._save(task)
         job._task = task
 
         for i in range(10):
@@ -172,12 +162,11 @@ class TestJobRepository:
 
         assert repository._search("id", "job-2", filters=[{"version": "non_existed_version"}]) == []
 
-    @pytest.mark.parametrize("repo", [_JobFSRepository, _JobSQLRepository])
-    def test_export(self, tmpdir, job, repo, init_sql_repo):
-        repository = repo()
+    def test_export(self, tmpdir, job):
+        repository = _JobFSRepository()
         repository._save(job)
 
         repository._export(job.id, tmpdir.strpath)
-        dir_path = repository.dir_path if repo == _JobFSRepository else os.path.join(tmpdir.strpath, "job")
+        dir_path = repository.dir_path
 
         assert os.path.exists(os.path.join(dir_path, f"{job.id}.json"))

+ 1 - 28
tests/core/repository/mocks.py

@@ -14,26 +14,12 @@ import pathlib
 from dataclasses import dataclass
 from typing import Any, Dict, Optional
 
-from sqlalchemy import Column, String, Table
-from sqlalchemy.dialects import sqlite
-from sqlalchemy.orm import declarative_base, registry
-from sqlalchemy.schema import CreateTable
-
 from taipy.config.config import Config
 from taipy.core._repository._abstract_converter import _AbstractConverter
 from taipy.core._repository._filesystem_repository import _FileSystemRepository
-from taipy.core._repository._sql_repository import _SQLRepository
 from taipy.core._version._version_manager import _VersionManager
 
 
-class Base:
-    __allow_unmapped__ = True
-
-
-Base = declarative_base(cls=Base)  # type: ignore
-mapper_registry = registry()
-
-
 @dataclass
 class MockObj:
     def __init__(self, id: str, name: str, version: Optional[str] = None) -> None:
@@ -46,14 +32,7 @@ class MockObj:
 
 
 @dataclass
-class MockModel(Base):  # type: ignore
-    __table__ = Table(
-        "mock_model",
-        mapper_registry.metadata,
-        Column("id", String(200), primary_key=True),
-        Column("name", String(200)),
-        Column("version", String(200)),
-    )
+class MockModel:  # type: ignore
     id: str
     name: str
     version: str
@@ -93,9 +72,3 @@ class MockFSRepository(_FileSystemRepository):
     @property
     def _storage_folder(self) -> pathlib.Path:
         return pathlib.Path(Config.core.storage_folder)  # type: ignore
-
-
-class MockSQLRepository(_SQLRepository):
-    def __init__(self, **kwargs):
-        super().__init__(**kwargs)
-        self.db.execute(str(CreateTable(MockModel.__table__, if_not_exists=True).compile(dialect=sqlite.dialect())))

+ 59 - 0
tests/core/repository/test_base_model.py

@@ -0,0 +1,59 @@
+# Copyright 2021-2024 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import dataclasses
+import enum
+import json
+
+import pytest
+
+from taipy.core._repository._base_taipy_model import _BaseModel, _Encoder
+
+
+class SampleEnum(enum.Enum):
+    VALUE1 = "value1"
+    VALUE2 = "value2"
+
+
+@dataclasses.dataclass
+class SampleModel(_BaseModel):
+    attr1: int
+    attr2: str
+    attr3: SampleEnum
+
+
+@pytest.fixture
+def sample_model():
+    return SampleModel(attr1=1, attr2="test", attr3=SampleEnum.VALUE1)
+
+
+def test_iter(sample_model):
+    items = dict(sample_model)
+    expected_items = {"attr1": 1, "attr2": "test", "attr3": SampleEnum.VALUE1}
+    assert items == expected_items
+
+
+def test_to_dict(sample_model):
+    model_dict = sample_model.to_dict()
+    expected_dict = {"attr1": 1, "attr2": "test", "attr3": repr(SampleEnum.VALUE1)}
+    assert model_dict == expected_dict
+
+
+def test_serialize_attribute(sample_model):
+    serialized = _BaseModel._serialize_attribute(sample_model.attr2)
+    expected_serialized = json.dumps(sample_model.attr2, ensure_ascii=False, cls=_Encoder)
+    assert serialized == expected_serialized
+
+
+def test_deserialize_attribute(sample_model):
+    serialized = json.dumps(sample_model.attr2, ensure_ascii=False, cls=_Encoder)
+    deserialized = _BaseModel._deserialize_attribute(serialized)
+    assert deserialized == sample_model.attr2

+ 8 - 15
tests/core/repository/test_repositories.py

@@ -18,7 +18,7 @@ import pytest
 
 from taipy.core.exceptions.exceptions import ModelNotFound
 
-from .mocks import MockConverter, MockFSRepository, MockModel, MockObj, MockSQLRepository
+from .mocks import MockConverter, MockFSRepository, MockModel, MockObj
 
 
 class TestRepositoriesStorage:
@@ -42,10 +42,9 @@ class TestRepositoriesStorage:
         "mock_repo,params",
         [
             (MockFSRepository, {"model_type": MockModel, "dir_name": "mock_model", "converter": MockConverter}),
-            (MockSQLRepository, {"model_type": MockModel, "converter": MockConverter}),
         ],
     )
-    def test_save_and_fetch_model(self, mock_repo, params, init_sql_repo):
+    def test_save_and_fetch_model(self, mock_repo, params):
         r = mock_repo(**params)
         m = MockObj("uuid", "foo")
         r._save(m)
@@ -57,10 +56,9 @@ class TestRepositoriesStorage:
         "mock_repo,params",
         [
             (MockFSRepository, {"model_type": MockModel, "dir_name": "mock_model", "converter": MockConverter}),
-            (MockSQLRepository, {"model_type": MockModel, "converter": MockConverter}),
         ],
     )
-    def test_exists(self, mock_repo, params, init_sql_repo):
+    def test_exists(self, mock_repo, params):
         r = mock_repo(**params)
         m = MockObj("uuid", "foo")
         r._save(m)
@@ -72,10 +70,9 @@ class TestRepositoriesStorage:
         "mock_repo,params",
         [
             (MockFSRepository, {"model_type": MockModel, "dir_name": "mock_model", "converter": MockConverter}),
-            (MockSQLRepository, {"model_type": MockModel, "converter": MockConverter}),
         ],
     )
-    def test_get_all(self, mock_repo, params, init_sql_repo):
+    def test_get_all(self, mock_repo, params):
         objs = []
         r = mock_repo(**params)
         r._delete_all()
@@ -96,10 +93,9 @@ class TestRepositoriesStorage:
         "mock_repo,params",
         [
             (MockFSRepository, {"model_type": MockModel, "dir_name": "mock_model", "converter": MockConverter}),
-            (MockSQLRepository, {"model_type": MockModel, "converter": MockConverter}),
         ],
     )
-    def test_delete_all(self, mock_repo, params, init_sql_repo):
+    def test_delete_all(self, mock_repo, params):
         r = mock_repo(**params)
         r._delete_all()
 
@@ -118,10 +114,9 @@ class TestRepositoriesStorage:
         "mock_repo,params",
         [
             (MockFSRepository, {"model_type": MockModel, "dir_name": "mock_model", "converter": MockConverter}),
-            (MockSQLRepository, {"model_type": MockModel, "converter": MockConverter}),
         ],
     )
-    def test_delete_many(self, mock_repo, params, init_sql_repo):
+    def test_delete_many(self, mock_repo, params):
         r = mock_repo(**params)
         r._delete_all()
 
@@ -139,10 +134,9 @@ class TestRepositoriesStorage:
         "mock_repo,params",
         [
             (MockFSRepository, {"model_type": MockModel, "dir_name": "mock_model", "converter": MockConverter}),
-            (MockSQLRepository, {"model_type": MockModel, "converter": MockConverter}),
         ],
     )
-    def test_search(self, mock_repo, params, init_sql_repo):
+    def test_search(self, mock_repo, params):
         r = mock_repo(**params)
         r._delete_all()
 
@@ -159,11 +153,10 @@ class TestRepositoriesStorage:
         "mock_repo,params",
         [
             (MockFSRepository, {"model_type": MockModel, "dir_name": "mock_model", "converter": MockConverter}),
-            (MockSQLRepository, {"model_type": MockModel, "converter": MockConverter}),
         ],
     )
     @pytest.mark.parametrize("export_path", ["tmp"])
-    def test_export(self, mock_repo, params, export_path, init_sql_repo):
+    def test_export(self, mock_repo, params, export_path):
         r = mock_repo(**params)
 
         m = MockObj("uuid", "foo")

+ 10 - 1
tests/core/scenario/test_scenario_manager.py

@@ -1508,12 +1508,21 @@ def test_filter_scenarios_by_creation_datetime():
     assert len(filtered_scenarios) == 1
     assert [s_1_1] == filtered_scenarios
 
-    # The time period is inclusive
+    # The start time is inclusive
     filtered_scenarios = _ScenarioManager._filter_by_creation_time(
         scenarios=all_scenarios,
         created_start_time=datetime(2024, 1, 1),
         created_end_time=datetime(2024, 1, 3),
     )
+    assert len(filtered_scenarios) == 1
+    assert [s_1_1] == filtered_scenarios
+
+    # The end time is exclusive
+    filtered_scenarios = _ScenarioManager._filter_by_creation_time(
+        scenarios=all_scenarios,
+        created_start_time=datetime(2024, 1, 1),
+        created_end_time=datetime(2024, 1, 4),
+    )
     assert len(filtered_scenarios) == 2
     assert sorted([s_1_1.id, s_1_2.id]) == sorted([scenario.id for scenario in filtered_scenarios])
 

+ 0 - 490
tests/core/scenario/test_scenario_manager_with_sql_repo.py

@@ -1,490 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-from datetime import datetime, timedelta
-
-import freezegun
-import pytest
-
-from taipy.config.common.frequency import Frequency
-from taipy.config.common.scope import Scope
-from taipy.config.config import Config
-from taipy.core._version._version_manager import _VersionManager
-from taipy.core.cycle._cycle_manager import _CycleManager
-from taipy.core.data._data_manager import _DataManager
-from taipy.core.data.in_memory import InMemoryDataNode
-from taipy.core.exceptions.exceptions import DeletingPrimaryScenario
-from taipy.core.scenario._scenario_manager import _ScenarioManager
-from taipy.core.scenario.scenario import Scenario
-from taipy.core.scenario.scenario_id import ScenarioId
-from taipy.core.sequence._sequence_manager import _SequenceManager
-from taipy.core.task._task_manager import _TaskManager
-from taipy.core.task.task import Task
-from taipy.core.task.task_id import TaskId
-
-
-def test_set_and_get_scenario(cycle, init_sql_repo):
-    scenario_id_1 = ScenarioId("scenario_id_1")
-    scenario_1 = Scenario("scenario_name_1", [], {}, [], scenario_id_1)
-
-    input_dn_2 = InMemoryDataNode("foo", Scope.SCENARIO)
-    output_dn_2 = InMemoryDataNode("bar", Scope.SCENARIO)
-    additional_dn_2 = InMemoryDataNode("zyx", Scope.SCENARIO)
-    task_name_2 = "task_2"
-    task_2 = Task(task_name_2, {}, print, [input_dn_2], [output_dn_2], TaskId("task_id_2"))
-    scenario_id_2 = ScenarioId("scenario_id_2")
-    scenario_2 = Scenario(
-        "scenario_name_2",
-        [task_2],
-        {},
-        [additional_dn_2],
-        scenario_id_2,
-        datetime.now(),
-        True,
-        cycle,
-        sequences={"sequence_2": {"tasks": [task_2]}},
-    )
-
-    additional_dn_3 = InMemoryDataNode("baz", Scope.SCENARIO)
-    task_name_3 = "task_3"
-    task_3 = Task(task_name_3, {}, print, id=TaskId("task_id_3"))
-    scenario_3_with_same_id = Scenario(
-        "scenario_name_3",
-        [task_3],
-        {},
-        [additional_dn_3],
-        scenario_id_1,
-        datetime.now(),
-        False,
-        cycle,
-        sequences={"sequence_3": {}},
-    )
-
-    # No existing scenario
-    assert len(_ScenarioManager._get_all()) == 0
-    assert _ScenarioManager._get(scenario_id_1) is None
-    assert _ScenarioManager._get(scenario_1) is None
-    assert _ScenarioManager._get(scenario_id_2) is None
-    assert _ScenarioManager._get(scenario_2) is None
-
-    # Save one scenario. We expect to have only one scenario stored
-    _ScenarioManager._set(scenario_1)
-    assert len(_ScenarioManager._get_all()) == 1
-    assert _ScenarioManager._get(scenario_id_1).id == scenario_1.id
-    assert _ScenarioManager._get(scenario_id_1).config_id == scenario_1.config_id
-    assert len(_ScenarioManager._get(scenario_id_1).tasks) == 0
-    assert len(_ScenarioManager._get(scenario_id_1).additional_data_nodes) == 0
-    assert len(_ScenarioManager._get(scenario_id_1).data_nodes) == 0
-    assert len(_ScenarioManager._get(scenario_id_1).sequences) == 0
-    assert _ScenarioManager._get(scenario_1).id == scenario_1.id
-    assert _ScenarioManager._get(scenario_1).config_id == scenario_1.config_id
-    assert len(_ScenarioManager._get(scenario_1).tasks) == 0
-    assert len(_ScenarioManager._get(scenario_1).additional_data_nodes) == 0
-    assert len(_ScenarioManager._get(scenario_1).data_nodes) == 0
-    assert len(_ScenarioManager._get(scenario_1).sequences) == 0
-    assert _ScenarioManager._get(scenario_id_2) is None
-    assert _ScenarioManager._get(scenario_2) is None
-
-    # Save a second scenario. Now, we expect to have a total of two scenarios stored
-    _TaskManager._set(task_2)
-    _CycleManager._set(cycle)
-    _ScenarioManager._set(scenario_2)
-    _DataManager._set(additional_dn_2)
-    assert len(_ScenarioManager._get_all()) == 2
-    assert _ScenarioManager._get(scenario_id_1).id == scenario_1.id
-    assert _ScenarioManager._get(scenario_id_1).config_id == scenario_1.config_id
-    assert len(_ScenarioManager._get(scenario_id_1).tasks) == 0
-    assert len(_ScenarioManager._get(scenario_id_1).additional_data_nodes) == 0
-    assert len(_ScenarioManager._get(scenario_id_1).data_nodes) == 0
-    assert len(_ScenarioManager._get(scenario_id_1).sequences) == 0
-    assert _ScenarioManager._get(scenario_1).id == scenario_1.id
-    assert _ScenarioManager._get(scenario_1).config_id == scenario_1.config_id
-    assert len(_ScenarioManager._get(scenario_1).tasks) == 0
-    assert len(_ScenarioManager._get(scenario_1).additional_data_nodes) == 0
-    assert len(_ScenarioManager._get(scenario_1).data_nodes) == 0
-    assert len(_ScenarioManager._get(scenario_1).sequences) == 0
-    assert _ScenarioManager._get(scenario_id_2).id == scenario_2.id
-    assert _ScenarioManager._get(scenario_id_2).config_id == scenario_2.config_id
-    assert len(_ScenarioManager._get(scenario_id_2).tasks) == 1
-    assert len(_ScenarioManager._get(scenario_id_2).additional_data_nodes) == 1
-    assert len(_ScenarioManager._get(scenario_id_2).data_nodes) == 3
-    assert len(_ScenarioManager._get(scenario_id_2).sequences) == 1
-    assert _ScenarioManager._get(scenario_2).id == scenario_2.id
-    assert _ScenarioManager._get(scenario_2).config_id == scenario_2.config_id
-    assert len(_ScenarioManager._get(scenario_2).tasks) == 1
-    assert len(_ScenarioManager._get(scenario_2).additional_data_nodes) == 1
-    assert len(_ScenarioManager._get(scenario_2).data_nodes) == 3
-    assert len(_ScenarioManager._get(scenario_2).sequences) == 1
-    assert _TaskManager._get(task_2.id).id == task_2.id
-    assert _ScenarioManager._get(scenario_id_2).cycle == cycle
-    assert _ScenarioManager._get(scenario_2).cycle == cycle
-    assert _CycleManager._get(cycle.id).id == cycle.id
-
-    # We save the first scenario again. We expect nothing to change
-    _ScenarioManager._set(scenario_1)
-    assert len(_ScenarioManager._get_all()) == 2
-    assert _ScenarioManager._get(scenario_id_1).id == scenario_1.id
-    assert _ScenarioManager._get(scenario_id_1).config_id == scenario_1.config_id
-    assert len(_ScenarioManager._get(scenario_id_1).tasks) == 0
-    assert len(_ScenarioManager._get(scenario_id_1).additional_data_nodes) == 0
-    assert len(_ScenarioManager._get(scenario_id_1).data_nodes) == 0
-    assert len(_ScenarioManager._get(scenario_id_1).sequences) == 0
-    assert _ScenarioManager._get(scenario_1).id == scenario_1.id
-    assert _ScenarioManager._get(scenario_1).config_id == scenario_1.config_id
-    assert len(_ScenarioManager._get(scenario_1).tasks) == 0
-    assert len(_ScenarioManager._get(scenario_1).additional_data_nodes) == 0
-    assert len(_ScenarioManager._get(scenario_1).data_nodes) == 0
-    assert len(_ScenarioManager._get(scenario_1).sequences) == 0
-    assert _ScenarioManager._get(scenario_id_2).id == scenario_2.id
-    assert _ScenarioManager._get(scenario_id_2).config_id == scenario_2.config_id
-    assert len(_ScenarioManager._get(scenario_id_2).tasks) == 1
-    assert len(_ScenarioManager._get(scenario_id_2).additional_data_nodes) == 1
-    assert len(_ScenarioManager._get(scenario_id_2).data_nodes) == 3
-    assert len(_ScenarioManager._get(scenario_id_2).sequences) == 1
-    assert _ScenarioManager._get(scenario_2).id == scenario_2.id
-    assert _ScenarioManager._get(scenario_2).config_id == scenario_2.config_id
-    assert len(_ScenarioManager._get(scenario_2).tasks) == 1
-    assert len(_ScenarioManager._get(scenario_2).additional_data_nodes) == 1
-    assert len(_ScenarioManager._get(scenario_2).data_nodes) == 3
-    assert len(_ScenarioManager._get(scenario_2).sequences) == 1
-    assert _TaskManager._get(task_2.id).id == task_2.id
-    assert _CycleManager._get(cycle.id).id == cycle.id
-
-    # We save a third scenario with same id as the first one.
-    # We expect the first scenario to be updated
-    _DataManager._set(additional_dn_3)
-    _TaskManager._set(task_3)
-    _TaskManager._set(scenario_2.tasks[task_name_2])
-    _ScenarioManager._set(scenario_3_with_same_id)
-    assert len(_ScenarioManager._get_all()) == 2
-    assert _ScenarioManager._get(scenario_id_1).id == scenario_1.id
-    assert _ScenarioManager._get(scenario_id_1).config_id == scenario_3_with_same_id.config_id
-    assert len(_ScenarioManager._get(scenario_id_1).tasks) == 1
-    assert len(_ScenarioManager._get(scenario_id_1).additional_data_nodes) == 1
-    assert len(_ScenarioManager._get(scenario_id_1).data_nodes) == 1
-    assert len(_ScenarioManager._get(scenario_id_1).sequences) == 1
-    assert _ScenarioManager._get(scenario_id_1).cycle == cycle
-    assert _ScenarioManager._get(scenario_1).id == scenario_1.id
-    assert _ScenarioManager._get(scenario_1).config_id == scenario_3_with_same_id.config_id
-    assert len(_ScenarioManager._get(scenario_1).tasks) == 1
-    assert len(_ScenarioManager._get(scenario_1).additional_data_nodes) == 1
-    assert len(_ScenarioManager._get(scenario_1).data_nodes) == 1
-    assert len(_ScenarioManager._get(scenario_1).sequences) == 1
-    assert _ScenarioManager._get(scenario_1).cycle == cycle
-    assert _ScenarioManager._get(scenario_id_2).id == scenario_2.id
-    assert _ScenarioManager._get(scenario_id_2).config_id == scenario_2.config_id
-    assert len(_ScenarioManager._get(scenario_id_2).tasks) == 1
-    assert len(_ScenarioManager._get(scenario_id_2).additional_data_nodes) == 1
-    assert len(_ScenarioManager._get(scenario_id_2).data_nodes) == 3
-    assert len(_ScenarioManager._get(scenario_id_2).sequences) == 1
-    assert _ScenarioManager._get(scenario_2).id == scenario_2.id
-    assert _ScenarioManager._get(scenario_2).config_id == scenario_2.config_id
-    assert len(_ScenarioManager._get(scenario_2).tasks) == 1
-    assert len(_ScenarioManager._get(scenario_2).additional_data_nodes) == 1
-    assert len(_ScenarioManager._get(scenario_2).data_nodes) == 3
-    assert len(_ScenarioManager._get(scenario_2).sequences) == 1
-    assert _TaskManager._get(task_2.id).id == task_2.id
-
-
-def test_get_all_on_multiple_versions_environment(init_sql_repo):
-    # Create 5 scenarios with 2 versions each
-    # Only version 1.0 has the scenario with config_id = "config_id_1"
-    # Only version 2.0 has the scenario with config_id = "config_id_6"
-    for version in range(1, 3):
-        for i in range(5):
-            _ScenarioManager._set(
-                Scenario(f"config_id_{i+version}", [], {}, ScenarioId(f"id{i}_v{version}"), version=f"{version}.0")
-            )
-
-    _VersionManager._set_experiment_version("1.0")
-    assert len(_ScenarioManager._get_all()) == 5
-    assert len(_ScenarioManager._get_all_by(filters=[{"version": "1.0", "config_id": "config_id_1"}])) == 1
-    assert len(_ScenarioManager._get_all_by(filters=[{"version": "1.0", "config_id": "config_id_6"}])) == 0
-
-    _VersionManager._set_experiment_version("2.0")
-    assert len(_ScenarioManager._get_all()) == 5
-    assert len(_ScenarioManager._get_all_by(filters=[{"version": "2.0", "config_id": "config_id_1"}])) == 0
-    assert len(_ScenarioManager._get_all_by(filters=[{"version": "2.0", "config_id": "config_id_6"}])) == 1
-
-    _VersionManager._set_development_version("1.0")
-    assert len(_ScenarioManager._get_all()) == 5
-    assert len(_ScenarioManager._get_all_by(filters=[{"version": "1.0", "config_id": "config_id_1"}])) == 1
-    assert len(_ScenarioManager._get_all_by(filters=[{"version": "1.0", "config_id": "config_id_6"}])) == 0
-
-    _VersionManager._set_development_version("2.0")
-    assert len(_ScenarioManager._get_all()) == 5
-    assert len(_ScenarioManager._get_all_by(filters=[{"version": "2.0", "config_id": "config_id_1"}])) == 0
-    assert len(_ScenarioManager._get_all_by(filters=[{"version": "2.0", "config_id": "config_id_6"}])) == 1
-
-
-def test_create_scenario_does_not_modify_config(init_sql_repo):
-    creation_date_1 = datetime.now()
-    name_1 = "name_1"
-    scenario_config = Config.configure_scenario("sc", None, None, Frequency.DAILY)
-
-    assert scenario_config.properties.get("name") is None
-    assert len(scenario_config.properties) == 0
-
-    scenario = _ScenarioManager._create(scenario_config, creation_date=creation_date_1, name=name_1)
-    assert len(scenario_config.properties) == 0
-    assert len(scenario.properties) == 1
-    assert scenario.properties.get("name") == name_1
-    assert scenario.name == name_1
-
-    scenario.properties["foo"] = "bar"
-    _ScenarioManager._set(scenario)
-    assert len(scenario_config.properties) == 0
-    assert len(scenario.properties) == 2
-    assert scenario.properties.get("foo") == "bar"
-    assert scenario.properties.get("name") == name_1
-    assert scenario.name == name_1
-
-    scenario_2 = _ScenarioManager._create(scenario_config, creation_date=creation_date_1)
-    assert scenario_2.name is None
-
-
-def test_create_and_delete_scenario(init_sql_repo):
-    creation_date_1 = datetime.now()
-    creation_date_2 = creation_date_1 + timedelta(minutes=10)
-
-    name_1 = "name_1"
-
-    _ScenarioManager._delete_all()
-    assert len(_ScenarioManager._get_all()) == 0
-
-    scenario_config = Config.configure_scenario("sc", None, None, Frequency.DAILY)
-
-    scenario_1 = _ScenarioManager._create(scenario_config, creation_date=creation_date_1, name=name_1)
-    assert scenario_1.config_id == "sc"
-    assert scenario_1.sequences == {}
-    assert scenario_1.tasks == {}
-    assert scenario_1.additional_data_nodes == {}
-    assert scenario_1.data_nodes == {}
-    assert scenario_1.cycle.frequency == Frequency.DAILY
-    assert scenario_1.is_primary
-    assert scenario_1.cycle.creation_date == creation_date_1
-    assert scenario_1.cycle.start_date.date() == creation_date_1.date()
-    assert scenario_1.cycle.end_date.date() == creation_date_1.date()
-    assert scenario_1.creation_date == creation_date_1
-    assert scenario_1.name == name_1
-    assert scenario_1.properties["name"] == name_1
-    assert scenario_1.tags == set()
-
-    cycle_id_1 = scenario_1.cycle.id
-    assert _CycleManager._get(cycle_id_1).id == cycle_id_1
-    _ScenarioManager._delete(scenario_1.id)
-    assert _ScenarioManager._get(scenario_1.id) is None
-    assert _CycleManager._get(cycle_id_1) is None
-
-    # Recreate scenario_1
-    scenario_1 = _ScenarioManager._create(scenario_config, creation_date=creation_date_1, name=name_1)
-
-    scenario_2 = _ScenarioManager._create(scenario_config, creation_date=creation_date_2)
-    assert scenario_2.config_id == "sc"
-    assert scenario_2.sequences == {}
-    assert scenario_2.tasks == {}
-    assert scenario_2.additional_data_nodes == {}
-    assert scenario_2.data_nodes == {}
-    assert scenario_2.cycle.frequency == Frequency.DAILY
-    assert not scenario_2.is_primary
-    assert scenario_2.cycle.creation_date == creation_date_1
-    assert scenario_2.cycle.start_date.date() == creation_date_2.date()
-    assert scenario_2.cycle.end_date.date() == creation_date_2.date()
-    assert scenario_2.properties.get("name") is None
-    assert scenario_2.tags == set()
-
-    assert scenario_1 != scenario_2
-    assert scenario_1.cycle == scenario_2.cycle
-
-    assert len(_ScenarioManager._get_all()) == 2
-    with pytest.raises(DeletingPrimaryScenario):
-        _ScenarioManager._delete(
-            scenario_1.id,
-        )
-
-    _ScenarioManager._delete(
-        scenario_2.id,
-    )
-    assert len(_ScenarioManager._get_all()) == 1
-    _ScenarioManager._delete(scenario_1.id)
-    assert len(_ScenarioManager._get_all()) == 0
-
-
-def mult_by_2(nb: int):
-    return nb * 2
-
-
-def mult_by_3(nb: int):
-    return nb * 3
-
-
-def mult_by_4(nb: int):
-    return nb * 4
-
-
-def test_scenario_manager_only_creates_data_node_once(init_sql_repo):
-    # dn_1 ---> mult_by_2 ---> dn_2 ---> mult_by_3 ---> dn_6
-    # dn_1 ---> mult_by_4 ---> dn_4
-
-    dn_config_1 = Config.configure_data_node("foo", "in_memory", Scope.GLOBAL, default_data=1)
-    dn_config_2 = Config.configure_data_node("bar", "in_memory", Scope.CYCLE, default_data=0)
-    dn_config_6 = Config.configure_data_node("baz", "in_memory", Scope.CYCLE, default_data=0)
-    dn_config_4 = Config.configure_data_node("qux", "in_memory", Scope.SCENARIO, default_data=0)
-    task_mult_by_2_config = Config.configure_task("mult_by_2", mult_by_2, [dn_config_1], dn_config_2)
-    task_mult_by_3_config = Config.configure_task("mult_by_3", mult_by_3, [dn_config_2], dn_config_6)
-    task_mult_by_4_config = Config.configure_task("mult_by_4", mult_by_4, [dn_config_1], dn_config_4)
-    scenario_config = Config.configure_scenario(
-        "awesome_scenario", [task_mult_by_2_config, task_mult_by_3_config, task_mult_by_4_config], None, Frequency.DAILY
-    )
-    scenario_config.add_sequences(
-        {"by_6": [task_mult_by_2_config, task_mult_by_3_config], "by_4": [task_mult_by_4_config]}
-    )
-
-    assert len(_DataManager._get_all()) == 0
-    assert len(_TaskManager._get_all()) == 0
-    assert len(_SequenceManager._get_all()) == 0
-    assert len(_ScenarioManager._get_all()) == 0
-    assert len(_CycleManager._get_all()) == 0
-
-    scenario_1 = _ScenarioManager._create(scenario_config)
-
-    assert len(_DataManager._get_all()) == 4
-    assert len(_TaskManager._get_all()) == 3
-    assert len(_SequenceManager._get_all()) == 2
-    assert len(_ScenarioManager._get_all()) == 1
-    assert scenario_1.foo.read() == 1
-    assert scenario_1.bar.read() == 0
-    assert scenario_1.baz.read() == 0
-    assert scenario_1.qux.read() == 0
-    assert scenario_1.by_6._get_sorted_tasks()[0][0].config_id == task_mult_by_2_config.id
-    assert scenario_1.by_6._get_sorted_tasks()[1][0].config_id == task_mult_by_3_config.id
-    assert scenario_1.by_4._get_sorted_tasks()[0][0].config_id == task_mult_by_4_config.id
-    assert scenario_1.tasks.keys() == {task_mult_by_2_config.id, task_mult_by_3_config.id, task_mult_by_4_config.id}
-
-    scenario_1_sorted_tasks = scenario_1._get_sorted_tasks()
-    expected = [{task_mult_by_2_config.id, task_mult_by_4_config.id}, {task_mult_by_3_config.id}]
-    for i, list_tasks_by_level in enumerate(scenario_1_sorted_tasks):
-        assert {t.config_id for t in list_tasks_by_level} == expected[i]
-    assert scenario_1.cycle.frequency == Frequency.DAILY
-
-    _ScenarioManager._create(scenario_config)
-
-    assert len(_DataManager._get_all()) == 5
-    assert len(_TaskManager._get_all()) == 4
-    assert len(_SequenceManager._get_all()) == 4
-    assert len(_ScenarioManager._get_all()) == 2
-
-
-def test_get_scenarios_by_config_id(init_sql_repo):
-    scenario_config_1 = Config.configure_scenario("s1", sequence_configs=[])
-    scenario_config_2 = Config.configure_scenario("s2", sequence_configs=[])
-    scenario_config_3 = Config.configure_scenario("s3", sequence_configs=[])
-
-    s_1_1 = _ScenarioManager._create(scenario_config_1)
-    s_1_2 = _ScenarioManager._create(scenario_config_1)
-    s_1_3 = _ScenarioManager._create(scenario_config_1)
-    assert len(_ScenarioManager._get_all()) == 3
-
-    s_2_1 = _ScenarioManager._create(scenario_config_2)
-    s_2_2 = _ScenarioManager._create(scenario_config_2)
-    assert len(_ScenarioManager._get_all()) == 5
-
-    s_3_1 = _ScenarioManager._create(scenario_config_3)
-    assert len(_ScenarioManager._get_all()) == 6
-
-    s1_scenarios = _ScenarioManager._get_by_config_id(scenario_config_1.id)
-    assert len(s1_scenarios) == 3
-    assert sorted([s_1_1.id, s_1_2.id, s_1_3.id]) == sorted([scenario.id for scenario in s1_scenarios])
-
-    s2_scenarios = _ScenarioManager._get_by_config_id(scenario_config_2.id)
-    assert len(s2_scenarios) == 2
-    assert sorted([s_2_1.id, s_2_2.id]) == sorted([scenario.id for scenario in s2_scenarios])
-
-    s3_scenarios = _ScenarioManager._get_by_config_id(scenario_config_3.id)
-    assert len(s3_scenarios) == 1
-    assert sorted([s_3_1.id]) == sorted([scenario.id for scenario in s3_scenarios])
-
-
-def test_get_scenarios_by_config_id_in_multiple_versions_environment(init_sql_repo):
-    scenario_config_1 = Config.configure_scenario("s1", sequence_configs=[])
-    scenario_config_2 = Config.configure_scenario("s2", sequence_configs=[])
-
-    _VersionManager._set_experiment_version("1.0")
-    _ScenarioManager._create(scenario_config_1)
-    _ScenarioManager._create(scenario_config_1)
-    _ScenarioManager._create(scenario_config_1)
-    _ScenarioManager._create(scenario_config_2)
-    _ScenarioManager._create(scenario_config_2)
-
-    assert len(_ScenarioManager._get_by_config_id(scenario_config_1.id)) == 3
-    assert len(_ScenarioManager._get_by_config_id(scenario_config_2.id)) == 2
-
-    _VersionManager._set_experiment_version("2.0")
-    _ScenarioManager._create(scenario_config_1)
-    _ScenarioManager._create(scenario_config_1)
-    _ScenarioManager._create(scenario_config_1)
-    _ScenarioManager._create(scenario_config_2)
-    _ScenarioManager._create(scenario_config_2)
-
-    assert len(_ScenarioManager._get_by_config_id(scenario_config_1.id)) == 3
-    assert len(_ScenarioManager._get_by_config_id(scenario_config_2.id)) == 2
-
-
-def test_filter_scenarios_by_creation_datetime(init_sql_repo):
-    scenario_config_1 = Config.configure_scenario("s1", sequence_configs=[])
-
-    with freezegun.freeze_time("2024-01-01"):
-        s_1_1 = _ScenarioManager._create(scenario_config_1)
-    with freezegun.freeze_time("2024-01-03"):
-        s_1_2 = _ScenarioManager._create(scenario_config_1)
-    with freezegun.freeze_time("2024-02-01"):
-        s_1_3 = _ScenarioManager._create(scenario_config_1)
-
-    all_scenarios = _ScenarioManager._get_all()
-
-    filtered_scenarios = _ScenarioManager._filter_by_creation_time(
-        scenarios=all_scenarios,
-        created_start_time=datetime(2024, 1, 1),
-        created_end_time=datetime(2024, 1, 2),
-    )
-    assert len(filtered_scenarios) == 1
-    assert [s_1_1] == filtered_scenarios
-
-    # The time period is inclusive
-    filtered_scenarios = _ScenarioManager._filter_by_creation_time(
-        scenarios=all_scenarios,
-        created_start_time=datetime(2024, 1, 1),
-        created_end_time=datetime(2024, 1, 3),
-    )
-    assert len(filtered_scenarios) == 2
-    assert sorted([s_1_1.id, s_1_2.id]) == sorted([scenario.id for scenario in filtered_scenarios])
-
-    filtered_scenarios = _ScenarioManager._filter_by_creation_time(
-        scenarios=all_scenarios,
-        created_start_time=datetime(2023, 1, 1),
-        created_end_time=datetime(2025, 1, 1),
-    )
-    assert len(filtered_scenarios) == 3
-    assert sorted([s_1_1.id, s_1_2.id, s_1_3.id]) == sorted([scenario.id for scenario in filtered_scenarios])
-
-    filtered_scenarios = _ScenarioManager._filter_by_creation_time(
-        scenarios=all_scenarios,
-        created_start_time=datetime(2024, 2, 1),
-    )
-    assert len(filtered_scenarios) == 1
-    assert [s_1_3] == filtered_scenarios
-
-    filtered_scenarios = _ScenarioManager._filter_by_creation_time(
-        scenarios=all_scenarios,
-        created_end_time=datetime(2024, 1, 2),
-    )
-    assert len(filtered_scenarios) == 1
-    assert [s_1_1] == filtered_scenarios

+ 21 - 32
tests/core/scenario/test_scenario_repositories.py

@@ -15,14 +15,12 @@ import pytest
 
 from taipy.core.exceptions import ModelNotFound
 from taipy.core.scenario._scenario_fs_repository import _ScenarioFSRepository
-from taipy.core.scenario._scenario_sql_repository import _ScenarioSQLRepository
 from taipy.core.scenario.scenario import Scenario, ScenarioId
 
 
 class TestScenarioFSRepository:
-    @pytest.mark.parametrize("repo", [_ScenarioFSRepository, _ScenarioSQLRepository])
-    def test_save_and_load(self, scenario: Scenario, repo, init_sql_repo):
-        repository = repo()
+    def test_save_and_load(self, scenario: Scenario):
+        repository = _ScenarioFSRepository()
         repository._save(scenario)
 
         loaded_scenario = repository._load(scenario.id)
@@ -39,17 +37,15 @@ class TestScenarioFSRepository:
         assert scenario._sequences == loaded_scenario._sequences
         assert scenario._version == loaded_scenario._version
 
-    @pytest.mark.parametrize("repo", [_ScenarioFSRepository, _ScenarioSQLRepository])
-    def test_exists(self, scenario, repo, init_sql_repo):
-        repository = repo()
+    def test_exists(self, scenario):
+        repository = _ScenarioFSRepository()
         repository._save(scenario)
 
         assert repository._exists(scenario.id)
         assert not repository._exists("not-existed-scenario")
 
-    @pytest.mark.parametrize("repo", [_ScenarioFSRepository, _ScenarioSQLRepository])
-    def test_load_all(self, scenario, repo, init_sql_repo):
-        repository = repo()
+    def test_load_all(self, scenario):
+        repository = _ScenarioFSRepository()
         for i in range(10):
             scenario.id = ScenarioId(f"scenario-{i}")
             repository._save(scenario)
@@ -57,9 +53,8 @@ class TestScenarioFSRepository:
 
         assert len(data_nodes) == 10
 
-    @pytest.mark.parametrize("repo", [_ScenarioFSRepository, _ScenarioSQLRepository])
-    def test_load_all_with_filters(self, scenario, repo, init_sql_repo):
-        repository = repo()
+    def test_load_all_with_filters(self, scenario):
+        repository = _ScenarioFSRepository()
 
         for i in range(10):
             scenario.id = ScenarioId(f"scenario-{i}")
@@ -68,9 +63,8 @@ class TestScenarioFSRepository:
 
         assert len(objs) == 1
 
-    @pytest.mark.parametrize("repo", [_ScenarioFSRepository, _ScenarioSQLRepository])
-    def test_delete(self, scenario, repo, init_sql_repo):
-        repository = repo()
+    def test_delete(self, scenario):
+        repository = _ScenarioFSRepository()
         repository._save(scenario)
 
         repository._delete(scenario.id)
@@ -78,9 +72,8 @@ class TestScenarioFSRepository:
         with pytest.raises(ModelNotFound):
             repository._load(scenario.id)
 
-    @pytest.mark.parametrize("repo", [_ScenarioFSRepository, _ScenarioSQLRepository])
-    def test_delete_all(self, scenario, repo, init_sql_repo):
-        repository = repo()
+    def test_delete_all(self, scenario):
+        repository = _ScenarioFSRepository()
 
         for i in range(10):
             scenario.id = ScenarioId(f"scenario-{i}")
@@ -92,9 +85,8 @@ class TestScenarioFSRepository:
 
         assert len(repository._load_all()) == 0
 
-    @pytest.mark.parametrize("repo", [_ScenarioFSRepository, _ScenarioSQLRepository])
-    def test_delete_many(self, scenario, repo, init_sql_repo):
-        repository = repo()
+    def test_delete_many(self, scenario):
+        repository = _ScenarioFSRepository()
 
         for i in range(10):
             scenario.id = ScenarioId(f"scenario-{i}")
@@ -107,9 +99,8 @@ class TestScenarioFSRepository:
 
         assert len(repository._load_all()) == 7
 
-    @pytest.mark.parametrize("repo", [_ScenarioFSRepository, _ScenarioSQLRepository])
-    def test_delete_by(self, scenario, repo, init_sql_repo):
-        repository = repo()
+    def test_delete_by(self, scenario):
+        repository = _ScenarioFSRepository()
 
         # Create 5 entities with version 1.0 and 5 entities with version 2.0
         for i in range(10):
@@ -123,9 +114,8 @@ class TestScenarioFSRepository:
 
         assert len(repository._load_all()) == 5
 
-    @pytest.mark.parametrize("repo", [_ScenarioFSRepository, _ScenarioSQLRepository])
-    def test_search(self, scenario, repo, init_sql_repo):
-        repository = repo()
+    def test_search(self, scenario):
+        repository = _ScenarioFSRepository()
 
         for i in range(10):
             scenario.id = ScenarioId(f"scenario-{i}")
@@ -143,12 +133,11 @@ class TestScenarioFSRepository:
 
         assert repository._search("id", "scenario-2", filters=[{"version": "non_existed_version"}]) == []
 
-    @pytest.mark.parametrize("repo", [_ScenarioFSRepository, _ScenarioSQLRepository])
-    def test_export(self, tmpdir, scenario, repo, init_sql_repo):
-        repository = repo()
+    def test_export(self, tmpdir, scenario):
+        repository = _ScenarioFSRepository()
         repository._save(scenario)
 
         repository._export(scenario.id, tmpdir.strpath)
-        dir_path = repository.dir_path if repo == _ScenarioFSRepository else os.path.join(tmpdir.strpath, "scenario")
+        dir_path = repository.dir_path
 
         assert os.path.exists(os.path.join(dir_path, f"{scenario.id}.json"))

+ 0 - 274
tests/core/sequence/test_sequence_manager_with_sql_repo.py

@@ -1,274 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import pytest
-
-from taipy.config.common.scope import Scope
-from taipy.config.config import Config
-from taipy.core._version._version_manager import _VersionManager
-from taipy.core.data._data_manager import _DataManager
-from taipy.core.data.in_memory import InMemoryDataNode
-from taipy.core.exceptions import SequenceAlreadyExists
-from taipy.core.job._job_manager import _JobManager
-from taipy.core.scenario._scenario_manager import _ScenarioManager
-from taipy.core.scenario.scenario import Scenario
-from taipy.core.sequence._sequence_manager import _SequenceManager
-from taipy.core.sequence.sequence_id import SequenceId
-from taipy.core.task._task_manager import _TaskManager
-from taipy.core.task.task import Task
-from taipy.core.task.task_id import TaskId
-
-
-def test_set_and_get_sequence(init_sql_repo):
-    input_dn = InMemoryDataNode("foo", Scope.SCENARIO)
-    output_dn = InMemoryDataNode("foo", Scope.SCENARIO)
-    task = Task("task", {}, print, [input_dn], [output_dn], TaskId("task_id"))
-
-    scenario = Scenario("scenario", {task}, {}, set())
-    _ScenarioManager._set(scenario)
-
-    sequence_name_1 = "p1"
-    sequence_id_1 = SequenceId(f"SEQUENCE_{sequence_name_1}_{scenario.id}")
-    sequence_name_2 = "p2"
-    sequence_id_2 = SequenceId(f"SEQUENCE_{sequence_name_2}_{scenario.id}")
-
-    # No existing Sequence
-    assert _SequenceManager._get(sequence_id_1) is None
-    assert _SequenceManager._get(sequence_id_2) is None
-
-    scenario.add_sequences({sequence_name_1: []})
-    sequence_1 = scenario.sequences[sequence_name_1]
-
-    # Save one sequence. We expect to have only one sequence stored
-    _SequenceManager._set(sequence_1)
-    assert _SequenceManager._get(sequence_id_1).id == sequence_1.id
-    assert len(_SequenceManager._get(sequence_id_1).tasks) == 0
-    assert _SequenceManager._get(sequence_1).id == sequence_1.id
-    assert len(_SequenceManager._get(sequence_1).tasks) == 0
-    assert _SequenceManager._get(sequence_id_2) is None
-
-    # Save a second sequence. Now, we expect to have a total of two sequences stored
-    _TaskManager._set(task)
-    scenario.add_sequences({sequence_name_2: [task]})
-    sequence_2 = scenario.sequences[sequence_name_2]
-    assert _SequenceManager._get(sequence_id_1).id == sequence_1.id
-    assert len(_SequenceManager._get(sequence_id_1).tasks) == 0
-    assert _SequenceManager._get(sequence_1).id == sequence_1.id
-    assert len(_SequenceManager._get(sequence_1).tasks) == 0
-    assert _SequenceManager._get(sequence_id_2).id == sequence_2.id
-    assert len(_SequenceManager._get(sequence_id_2).tasks) == 1
-    assert _SequenceManager._get(sequence_2).id == sequence_2.id
-    assert len(_SequenceManager._get(sequence_2).tasks) == 1
-
-    # We save the first sequence again. We expect an exception and nothing to change
-    with pytest.raises(SequenceAlreadyExists):
-        scenario.add_sequences({sequence_name_1: {}})
-    sequence_1 = scenario.sequences[sequence_name_1]
-    assert _SequenceManager._get(sequence_id_1).id == sequence_1.id
-    assert len(_SequenceManager._get(sequence_id_1).tasks) == 0
-    assert _SequenceManager._get(sequence_1).id == sequence_1.id
-    assert len(_SequenceManager._get(sequence_1).tasks) == 0
-    assert _SequenceManager._get(sequence_id_2).id == sequence_2.id
-    assert len(_SequenceManager._get(sequence_id_2).tasks) == 1
-    assert _SequenceManager._get(sequence_2).id == sequence_2.id
-    assert len(_SequenceManager._get(sequence_2).tasks) == 1
-
-
-def test_get_all_on_multiple_versions_environment(init_sql_repo):
-    # Create 5 sequences from Scenario with 2 versions each
-    for version in range(1, 3):
-        for i in range(5):
-            _ScenarioManager._set(
-                Scenario(
-                    f"config_id_{i+version}",
-                    [],
-                    {},
-                    [],
-                    f"SCENARIO_id_{i}_v{version}",
-                    version=f"{version}.0",
-                    sequences={"sequence": {}},
-                )
-            )
-
-    _VersionManager._set_experiment_version("1.0")
-    assert len(_SequenceManager._get_all()) == 5
-    assert (
-        len(_SequenceManager._get_all_by(filters=[{"version": "1.0", "id": "SEQUENCE_sequence_SCENARIO_id_1_v1"}])) == 1
-    )
-    assert (
-        len(_SequenceManager._get_all_by(filters=[{"version": "2.0", "id": "SEQUENCE_sequence_SCENARIO_id_1_v1"}])) == 0
-    )
-
-    _VersionManager._set_experiment_version("2.0")
-    assert len(_SequenceManager._get_all()) == 5
-    assert (
-        len(_SequenceManager._get_all_by(filters=[{"version": "2.0", "id": "SEQUENCE_sequence_SCENARIO_id_1_v1"}])) == 0
-    )
-    assert (
-        len(_SequenceManager._get_all_by(filters=[{"version": "2.0", "id": "SEQUENCE_sequence_SCENARIO_id_1_v2"}])) == 1
-    )
-
-    _VersionManager._set_development_version("1.0")
-    assert len(_SequenceManager._get_all()) == 5
-    assert (
-        len(_SequenceManager._get_all_by(filters=[{"version": "1.0", "id": "SEQUENCE_sequence_SCENARIO_id_1_v1"}])) == 1
-    )
-    assert (
-        len(_SequenceManager._get_all_by(filters=[{"version": "1.0", "id": "SEQUENCE_sequence_SCENARIO_id_1_v2"}])) == 0
-    )
-
-    _VersionManager._set_development_version("2.0")
-    assert len(_SequenceManager._get_all()) == 5
-    assert (
-        len(_SequenceManager._get_all_by(filters=[{"version": "2.0", "id": "SEQUENCE_sequence_SCENARIO_id_1_v1"}])) == 0
-    )
-    assert (
-        len(_SequenceManager._get_all_by(filters=[{"version": "2.0", "id": "SEQUENCE_sequence_SCENARIO_id_1_v2"}])) == 1
-    )
-
-
-def mult_by_two(nb: int):
-    return nb * 2
-
-
-def mult_by_3(nb: int):
-    return nb * 3
-
-
-def test_get_or_create_data(init_sql_repo):
-    # only create intermediate data node once
-    dn_config_1 = Config.configure_data_node("foo", "in_memory", Scope.SCENARIO, default_data=1)
-    dn_config_2 = Config.configure_data_node("bar", "in_memory", Scope.SCENARIO, default_data=0)
-    dn_config_6 = Config.configure_data_node("baz", "in_memory", Scope.SCENARIO, default_data=0)
-
-    task_config_mult_by_two = Config.configure_task("mult_by_two", mult_by_two, [dn_config_1], dn_config_2)
-    task_config_mult_by_3 = Config.configure_task("mult_by_3", mult_by_3, [dn_config_2], dn_config_6)
-    # dn_1 ---> mult_by_two ---> dn_2 ---> mult_by_3 ---> dn_6
-    scenario_config = Config.configure_scenario("scenario", [task_config_mult_by_two, task_config_mult_by_3])
-
-    assert len(_DataManager._get_all()) == 0
-    assert len(_TaskManager._get_all()) == 0
-
-    scenario = _ScenarioManager._create(scenario_config)
-    scenario.add_sequences({"by_6": list(scenario.tasks.values())})
-    sequence = scenario.sequences["by_6"]
-
-    assert sequence.name == "by_6"
-
-    assert len(_DataManager._get_all()) == 3
-    assert len(_TaskManager._get_all()) == 2
-    assert len(sequence._get_sorted_tasks()) == 2
-    assert sequence.foo.read() == 1
-    assert sequence.bar.read() == 0
-    assert sequence.baz.read() == 0
-    assert sequence._get_sorted_tasks()[0][0].config_id == task_config_mult_by_two.id
-    assert sequence._get_sorted_tasks()[1][0].config_id == task_config_mult_by_3.id
-
-    _SequenceManager._submit(sequence.id)
-    assert sequence.foo.read() == 1
-    assert sequence.bar.read() == 2
-    assert sequence.baz.read() == 6
-
-    sequence.foo.write("new data value")
-    assert sequence.foo.read() == "new data value"
-    assert sequence.bar.read() == 2
-    assert sequence.baz.read() == 6
-
-    sequence.bar.write(7)
-    assert sequence.foo.read() == "new data value"
-    assert sequence.bar.read() == 7
-    assert sequence.baz.read() == 6
-
-    with pytest.raises(AttributeError):
-        sequence.WRONG.write(7)
-
-
-def test_hard_delete_one_single_sequence_with_scenario_data_nodes(init_sql_repo):
-    dn_input_config = Config.configure_data_node("my_input", "in_memory", scope=Scope.SCENARIO, default_data="testing")
-    dn_output_config = Config.configure_data_node("my_output", "in_memory", scope=Scope.SCENARIO)
-    task_config = Config.configure_task("task_config", print, dn_input_config, dn_output_config)
-
-    tasks = _TaskManager._bulk_get_or_create([task_config])
-    scenario = Scenario("scenario", set(tasks), {}, sequences={"sequence": {"tasks": tasks}})
-    _ScenarioManager._set(scenario)
-
-    sequence = scenario.sequences["sequence"]
-    sequence.submit()
-
-    assert len(_ScenarioManager._get_all()) == 1
-    assert len(_SequenceManager._get_all()) == 1
-    assert len(_TaskManager._get_all()) == 1
-    assert len(_DataManager._get_all()) == 2
-    assert len(_JobManager._get_all()) == 1
-    _SequenceManager._hard_delete(sequence.id)
-    assert len(_ScenarioManager._get_all()) == 1
-    assert len(_SequenceManager._get_all()) == 0
-    assert len(_TaskManager._get_all()) == 1
-    assert len(_DataManager._get_all()) == 2
-    assert len(_JobManager._get_all()) == 1
-
-
-def test_hard_delete_one_single_sequence_with_cycle_data_nodes(init_sql_repo):
-    dn_input_config = Config.configure_data_node("my_input", "in_memory", scope=Scope.CYCLE, default_data="testing")
-    dn_output_config = Config.configure_data_node("my_output", "in_memory", scope=Scope.CYCLE)
-    task_config = Config.configure_task("task_config", print, dn_input_config, dn_output_config)
-
-    tasks = _TaskManager._bulk_get_or_create([task_config])
-    scenario = Scenario("scenario", tasks, {}, sequences={"sequence": {"tasks": tasks}})
-    _ScenarioManager._set(scenario)
-
-    sequence = scenario.sequences["sequence"]
-    sequence.submit()
-
-    assert len(_ScenarioManager._get_all()) == 1
-    assert len(_SequenceManager._get_all()) == 1
-    assert len(_TaskManager._get_all()) == 1
-    assert len(_DataManager._get_all()) == 2
-    assert len(_JobManager._get_all()) == 1
-    _SequenceManager._hard_delete(sequence.id)
-    assert len(_ScenarioManager._get_all()) == 1
-    assert len(_SequenceManager._get_all()) == 0
-    assert len(_TaskManager._get_all()) == 1
-    assert len(_DataManager._get_all()) == 2
-    assert len(_JobManager._get_all()) == 1
-
-
-def test_hard_delete_shared_entities(init_sql_repo):
-    input_dn = Config.configure_data_node("my_input", "in_memory", scope=Scope.SCENARIO, default_data="testing")
-    intermediate_dn = Config.configure_data_node("my_inter", "in_memory", scope=Scope.GLOBAL, default_data="testing")
-    output_dn = Config.configure_data_node("my_output", "in_memory", scope=Scope.GLOBAL, default_data="testing")
-    task_1 = Config.configure_task("task_1", print, input_dn, intermediate_dn)
-    task_2 = Config.configure_task("task_2", print, intermediate_dn, output_dn)
-
-    tasks_scenario_1 = _TaskManager._bulk_get_or_create([task_1, task_2], scenario_id="scenario_id_1")
-    tasks_scenario_2 = _TaskManager._bulk_get_or_create([task_1, task_2], scenario_id="scenario_id_2")
-
-    scenario_1 = Scenario("scenario_1", tasks_scenario_1, {}, sequences={"sequence": {"tasks": tasks_scenario_1}})
-    scenario_2 = Scenario("scenario_2", tasks_scenario_2, {}, sequences={"sequence": {"tasks": tasks_scenario_2}})
-    _ScenarioManager._set(scenario_1)
-    _ScenarioManager._set(scenario_2)
-    sequence_1 = scenario_1.sequences["sequence"]
-    sequence_2 = scenario_2.sequences["sequence"]
-
-    _SequenceManager._submit(sequence_1.id)
-    _SequenceManager._submit(sequence_2.id)
-
-    assert len(_ScenarioManager._get_all()) == 2
-    assert len(_SequenceManager._get_all()) == 2
-    assert len(_TaskManager._get_all()) == 3
-    assert len(_DataManager._get_all()) == 4
-    assert len(_JobManager._get_all()) == 4
-    _SequenceManager._hard_delete(sequence_1.id)
-    assert len(_ScenarioManager._get_all()) == 2
-    assert len(_SequenceManager._get_all()) == 1
-    assert len(_TaskManager._get_all()) == 3
-    assert len(_DataManager._get_all()) == 4
-    assert len(_JobManager._get_all()) == 4

+ 0 - 180
tests/core/submission/test_submission_manager_with_sql_repo.py

@@ -1,180 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-from datetime import datetime
-from time import sleep
-
-import pytest
-
-from taipy.core import Task
-from taipy.core._version._version_manager_factory import _VersionManagerFactory
-from taipy.core.exceptions.exceptions import SubmissionNotDeletedException
-from taipy.core.submission._submission_manager_factory import _SubmissionManagerFactory
-from taipy.core.submission.submission import Submission
-from taipy.core.submission.submission_status import SubmissionStatus
-
-
-def test_create_submission(scenario, init_sql_repo):
-    submission_1 = _SubmissionManagerFactory._build_manager()._create(
-        scenario.id, scenario._ID_PREFIX, scenario.config_id, debug=True, log="log_file", retry_note=5
-    )
-
-    assert isinstance(submission_1, Submission)
-    assert submission_1.id is not None
-    assert submission_1.entity_id == scenario.id
-    assert submission_1.jobs == []
-    assert submission_1.properties == {"debug": True, "log": "log_file", "retry_note": 5}
-    assert isinstance(submission_1.creation_date, datetime)
-    assert submission_1._submission_status == SubmissionStatus.SUBMITTED
-
-
-def test_get_submission(init_sql_repo):
-    submission_manager = _SubmissionManagerFactory._build_manager()
-
-    submission_1 = submission_manager._create(
-        "entity_id", "ENTITY_TYPE", "entity_config_id", debug=True, log="log_file", retry_note=5
-    )
-    submission_2 = submission_manager._get(submission_1.id)
-
-    assert submission_1.id == submission_2.id
-    assert submission_1.entity_id == submission_2.entity_id == "entity_id"
-    assert submission_1.jobs == submission_2.jobs
-    assert submission_1.creation_date == submission_2.creation_date
-    assert submission_1.submission_status == submission_2.submission_status
-    assert submission_1.properties == {"debug": True, "log": "log_file", "retry_note": 5}
-    assert submission_1.properties == submission_2.properties
-
-
-def test_get_all_submission(init_sql_repo):
-    submission_manager = _SubmissionManagerFactory._build_manager()
-    version_manager = _VersionManagerFactory._build_manager()
-
-    submission_manager._set(
-        Submission("entity_id", "submission_id", "entity_config_id", version=version_manager._get_latest_version())
-    )
-    for version_name in ["abc", "xyz"]:
-        for i in range(10):
-            submission_manager._set(
-                Submission("entity_id", f"submission_{version_name}_{i}", "entity_config_id", version=f"{version_name}")
-            )
-    assert len(submission_manager._get_all()) == 1
-
-    version_manager._set_experiment_version("xyz")
-    version_manager._set_experiment_version("abc")
-    assert len(submission_manager._get_all()) == 10
-    assert len(submission_manager._get_all("abc")) == 10
-    assert len(submission_manager._get_all("xyz")) == 10
-
-
-def test_get_latest_submission(init_sql_repo):
-    task_1 = Task("task_config_1", {}, print, id="task_id_1")
-    task_2 = Task("task_config_2", {}, print, id="task_id_2")
-
-    submission_manager = _SubmissionManagerFactory._build_manager()
-    submission_1 = submission_manager._create(task_1.id, task_1._ID_PREFIX, task_1.config_id)
-    assert submission_manager._get_latest(task_1) == submission_1
-    assert submission_manager._get_latest(task_2) is None
-
-    sleep(0.01)  # Comparison is based on time, precision on Windows is not enough important
-    submission_2 = submission_manager._create(task_2.id, task_2._ID_PREFIX, task_2.config_id)
-    assert submission_manager._get_latest(task_1) == submission_1
-    assert submission_manager._get_latest(task_2) == submission_2
-
-    sleep(0.01)  # Comparison is based on time, precision on Windows is not enough important
-    submission_3 = submission_manager._create(task_1.id, task_1._ID_PREFIX, task_1.config_id)
-    assert submission_manager._get_latest(task_1) == submission_3
-    assert submission_manager._get_latest(task_2) == submission_2
-
-    sleep(0.01)  # Comparison is based on time, precision on Windows is not enough important
-    submission_4 = submission_manager._create(task_2.id, task_2._ID_PREFIX, task_2.config_id)
-    assert submission_manager._get_latest(task_1) == submission_3
-    assert submission_manager._get_latest(task_2) == submission_4
-
-
-def test_delete_submission(init_sql_repo):
-    submission_manager = _SubmissionManagerFactory._build_manager()
-
-    submission = Submission("entity_id", "submission_id", "entity_config_id")
-    submission_manager._set(submission)
-
-    with pytest.raises(SubmissionNotDeletedException):
-        submission_manager._delete(submission.id)
-
-    submission.submission_status = SubmissionStatus.COMPLETED
-
-    for i in range(10):
-        submission_manager._set(Submission("entity_id", f"submission_{i}", "entity_config_id"))
-
-    assert len(submission_manager._get_all()) == 11
-    assert isinstance(submission_manager._get(submission.id), Submission)
-
-    submission_manager._delete(submission.id)
-    assert len(submission_manager._get_all()) == 10
-    assert submission_manager._get(submission.id) is None
-
-    submission_manager._delete_all()
-    assert len(submission_manager._get_all()) == 0
-
-
-def test_is_deletable(init_sql_repo):
-    submission_manager = _SubmissionManagerFactory._build_manager()
-
-    submission = Submission("entity_id", "submission_id", "entity_config_id")
-    submission_manager._set(submission)
-
-    assert len(submission_manager._get_all()) == 1
-
-    assert submission._submission_status == SubmissionStatus.SUBMITTED
-    assert not submission.is_deletable()
-    assert not submission_manager._is_deletable(submission)
-    assert not submission_manager._is_deletable(submission.id)
-
-    submission.submission_status = SubmissionStatus.UNDEFINED
-    assert submission.submission_status == SubmissionStatus.UNDEFINED
-    assert submission.is_deletable()
-    assert submission_manager._is_deletable(submission)
-    assert submission_manager._is_deletable(submission.id)
-
-    submission.submission_status = SubmissionStatus.CANCELED
-    assert submission.submission_status == SubmissionStatus.CANCELED
-    assert submission.is_deletable()
-    assert submission_manager._is_deletable(submission)
-    assert submission_manager._is_deletable(submission.id)
-
-    submission.submission_status = SubmissionStatus.FAILED
-    assert submission.submission_status == SubmissionStatus.FAILED
-    assert submission.is_deletable()
-    assert submission_manager._is_deletable(submission)
-    assert submission_manager._is_deletable(submission.id)
-
-    submission.submission_status = SubmissionStatus.BLOCKED
-    assert submission.submission_status == SubmissionStatus.BLOCKED
-    assert not submission.is_deletable()
-    assert not submission_manager._is_deletable(submission)
-    assert not submission_manager._is_deletable(submission.id)
-
-    submission.submission_status = SubmissionStatus.RUNNING
-    assert submission.submission_status == SubmissionStatus.RUNNING
-    assert not submission.is_deletable()
-    assert not submission_manager._is_deletable(submission)
-    assert not submission_manager._is_deletable(submission.id)
-
-    submission.submission_status = SubmissionStatus.PENDING
-    assert submission.submission_status == SubmissionStatus.PENDING
-    assert not submission.is_deletable()
-    assert not submission_manager._is_deletable(submission)
-    assert not submission_manager._is_deletable(submission.id)
-
-    submission.submission_status = SubmissionStatus.COMPLETED
-    assert submission.submission_status == SubmissionStatus.COMPLETED
-    assert submission.is_deletable()
-    assert submission_manager._is_deletable(submission)
-    assert submission_manager._is_deletable(submission.id)

+ 9 - 45
tests/core/submission/test_submission_repositories.py

@@ -21,22 +21,10 @@ from taipy.core.submission._submission_manager_factory import _SubmissionManager
 from taipy.core.submission.submission import Submission
 from taipy.core.task._task_manager_factory import _TaskManagerFactory
 from taipy.core.task.task import Task
-from tests.core.conftest import init_sql_repo
-
-
-def configure_fs_repo():
-    Config.configure_core(repository_type="default")
-
-
-def configure_sql_repo():
-    init_sql_repo  # noqa: B018
 
 
 class TestSubmissionRepository:
-    @pytest.mark.parametrize("configure_repo", [configure_fs_repo, configure_sql_repo])
-    def test_save_and_load(self, data_node, job, configure_repo):
-        configure_repo()
-
+    def test_save_and_load(self, data_node, job):
         _DataManagerFactory._build_manager()._repository._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
         _TaskManagerFactory._build_manager()._repository._save(task)
@@ -57,10 +45,7 @@ class TestSubmissionRepository:
         assert obj.entity_config_id == task.config_id
         assert obj.properties == {"debug": True, "log": "log_file", "retry_note": 5}
 
-    @pytest.mark.parametrize("configure_repo", [configure_fs_repo, configure_sql_repo])
-    def test_exists(self, configure_repo):
-        configure_repo()
-
+    def test_exists(self):
         submission = Submission("entity_id", "ENTITY_TYPE", "entity_config_id")
         submission_repository = _SubmissionManagerFactory._build_manager()._repository
         submission_repository._save(submission)
@@ -68,10 +53,7 @@ class TestSubmissionRepository:
         assert submission_repository._exists(submission.id)
         assert not submission_repository._exists("not-existed-submission")
 
-    @pytest.mark.parametrize("configure_repo", [configure_fs_repo, configure_sql_repo])
-    def test_load_all(self, configure_repo):
-        configure_repo()
-
+    def test_load_all(self):
         repository = _SubmissionManagerFactory._build_manager()._repository
         submission = Submission("entity_id", "ENTITY_TYPE", "entity_config_id")
         for i in range(10):
@@ -81,10 +63,7 @@ class TestSubmissionRepository:
 
         assert len(submissions) == 10
 
-    @pytest.mark.parametrize("configure_repo", [configure_fs_repo, configure_sql_repo])
-    def test_delete(self, configure_repo):
-        configure_repo()
-
+    def test_delete(self):
         repository = _SubmissionManagerFactory._build_manager()._repository
 
         submission = Submission("entity_id", "ENTITY_TYPE", "entity_config_id")
@@ -95,10 +74,7 @@ class TestSubmissionRepository:
         with pytest.raises(ModelNotFound):
             repository._load(submission.id)
 
-    @pytest.mark.parametrize("configure_repo", [configure_fs_repo, configure_sql_repo])
-    def test_delete_all(self, configure_repo):
-        configure_repo()
-
+    def test_delete_all(self):
         submission_repository = _SubmissionManagerFactory._build_manager()._repository
         submission = Submission("entity_id", "ENTITY_TYPE", "entity_config_id")
 
@@ -112,10 +88,7 @@ class TestSubmissionRepository:
 
         assert len(submission_repository._load_all()) == 0
 
-    @pytest.mark.parametrize("configure_repo", [configure_fs_repo, configure_sql_repo])
-    def test_delete_many(self, configure_repo):
-        configure_repo()
-
+    def test_delete_many(self):
         submission = Submission("entity_id", "ENTITY_TYPE", "entity_config_id")
         submission_repository = _SubmissionManagerFactory._build_manager()._repository
 
@@ -130,10 +103,7 @@ class TestSubmissionRepository:
 
         assert len(submission_repository._load_all()) == 7
 
-    @pytest.mark.parametrize("configure_repo", [configure_fs_repo, configure_sql_repo])
-    def test_delete_by(self, configure_repo):
-        configure_repo()
-
+    def test_delete_by(self):
         # Create 5 entities with version 1.0 and 5 entities with version 2.0
         submission_repository = _SubmissionManagerFactory._build_manager()._repository
         submission = Submission("entity_id", "ENTITY_TYPE", "entity_config_id")
@@ -149,10 +119,7 @@ class TestSubmissionRepository:
 
         assert len(submission_repository._load_all()) == 5
 
-    @pytest.mark.parametrize("configure_repo", [configure_fs_repo, configure_sql_repo])
-    def test_search(self, configure_repo):
-        configure_repo()
-
+    def test_search(self):
         submission_repository = _SubmissionManagerFactory._build_manager()._repository
         submission = Submission("entity_id", "ENTITY_TYPE", "entity_config_id", version="random_version_number")
         for i in range(10):
@@ -171,10 +138,7 @@ class TestSubmissionRepository:
 
         assert submission_repository._search("id", "submission-2", filters=[{"version": "non_existed_version"}]) == []
 
-    @pytest.mark.parametrize("configure_repo", [configure_fs_repo, configure_sql_repo])
-    def test_export(self, tmpdir, configure_repo):
-        configure_repo()
-
+    def test_export(self, tmpdir):
         repository = _SubmissionManagerFactory._build_manager()._repository
         submission = Submission("entity_id", "ENTITY_TYPE", "entity_config_id")
         repository._save(submission)

+ 0 - 389
tests/core/task/test_task_manager_with_sql_repo.py

@@ -1,389 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import uuid
-from unittest import mock
-
-import pytest
-
-from taipy.config.common.scope import Scope
-from taipy.config.config import Config
-from taipy.core._orchestrator._orchestrator import _Orchestrator
-from taipy.core._version._version_manager import _VersionManager
-from taipy.core.data._data_manager import _DataManager
-from taipy.core.data.in_memory import InMemoryDataNode
-from taipy.core.exceptions.exceptions import ModelNotFound, NonExistingTask
-from taipy.core.task._task_manager import _TaskManager
-from taipy.core.task.task import Task
-from taipy.core.task.task_id import TaskId
-
-
-def test_create_and_save(init_sql_repo):
-    input_configs = [Config.configure_data_node("my_input", "in_memory")]
-    output_configs = Config.configure_data_node("my_output", "in_memory")
-    task_config = Config.configure_task("foo", print, input_configs, output_configs)
-    task = _create_task_from_config(task_config)
-    assert task.id is not None
-    assert task.config_id == "foo"
-    assert len(task.input) == 1
-    assert len(_DataManager._get_all()) == 2
-    assert task.my_input.id is not None
-    assert task.my_input.config_id == "my_input"
-    assert task.my_output.id is not None
-    assert task.my_output.config_id == "my_output"
-    assert task.function == print
-    assert task.parent_ids == set()
-
-    task_retrieved_from_manager = _TaskManager._get(task.id)
-    assert task_retrieved_from_manager.id == task.id
-    assert task_retrieved_from_manager.config_id == task.config_id
-    assert len(task_retrieved_from_manager.input) == len(task.input)
-    assert task_retrieved_from_manager.my_input.id is not None
-    assert task_retrieved_from_manager.my_input.config_id == task.my_input.config_id
-    assert task_retrieved_from_manager.my_output.id is not None
-    assert task_retrieved_from_manager.my_output.config_id == task.my_output.config_id
-    assert task_retrieved_from_manager.function == task.function
-    assert task_retrieved_from_manager.parent_ids == set()
-
-
-def test_do_not_recreate_existing_data_node(init_sql_repo):
-    input_config = Config.configure_data_node("my_input", "in_memory", scope=Scope.SCENARIO)
-    output_config = Config.configure_data_node("my_output", "in_memory", scope=Scope.SCENARIO)
-
-    _DataManager._create_and_set(input_config, "scenario_id", "task_id")
-    assert len(_DataManager._get_all()) == 1
-
-    task_config = Config.configure_task("foo", print, input_config, output_config)
-    _create_task_from_config(task_config, scenario_id="scenario_id")
-    assert len(_DataManager._get_all()) == 2
-
-
-def test_do_not_recreate_existing_task(init_sql_repo):
-    assert len(_TaskManager._get_all()) == 0
-
-    input_config_scope_scenario = Config.configure_data_node("my_input_1", "in_memory", Scope.SCENARIO)
-    output_config_scope_scenario = Config.configure_data_node("my_output_1", "in_memory", Scope.SCENARIO)
-    task_config_1 = Config.configure_task("bar", print, input_config_scope_scenario, output_config_scope_scenario)
-    # task_config_1 scope is Scenario
-
-    task_1 = _create_task_from_config(task_config_1)
-    assert len(_TaskManager._get_all()) == 1
-    task_2 = _create_task_from_config(task_config_1)  # Do not create. It already exists for None scenario
-    assert len(_TaskManager._get_all()) == 1
-    assert task_1.id == task_2.id
-    task_3 = _create_task_from_config(task_config_1, None, None)  # Do not create. It already exists for None scenario
-    assert len(_TaskManager._get_all()) == 1
-    assert task_1.id == task_2.id
-    assert task_2.id == task_3.id
-    task_4 = _create_task_from_config(task_config_1, None, "scenario_1")  # Create even if sequence is the same.
-    assert len(_TaskManager._get_all()) == 2
-    assert task_1.id == task_2.id
-    assert task_2.id == task_3.id
-    assert task_3.id != task_4.id
-    task_5 = _create_task_from_config(
-        task_config_1, None, "scenario_1"
-    )  # Do not create. It already exists for scenario_1
-    assert len(_TaskManager._get_all()) == 2
-    assert task_1.id == task_2.id
-    assert task_2.id == task_3.id
-    assert task_3.id != task_4.id
-    assert task_4.id == task_5.id
-    task_6 = _create_task_from_config(task_config_1, None, "scenario_2")
-    assert len(_TaskManager._get_all()) == 3
-    assert task_1.id == task_2.id
-    assert task_2.id == task_3.id
-    assert task_3.id != task_4.id
-    assert task_4.id == task_5.id
-    assert task_5.id != task_6.id
-    assert task_3.id != task_6.id
-
-    input_config_scope_cycle = Config.configure_data_node("my_input_2", "in_memory", Scope.CYCLE)
-    output_config_scope_cycle = Config.configure_data_node("my_output_2", "in_memory", Scope.CYCLE)
-    task_config_2 = Config.configure_task("xyz", print, input_config_scope_cycle, output_config_scope_cycle)
-    # task_config_3 scope is Cycle
-
-    task_7 = _create_task_from_config(task_config_2)
-    assert len(_TaskManager._get_all()) == 4
-    task_8 = _create_task_from_config(task_config_2)  # Do not create. It already exists for None cycle
-    assert len(_TaskManager._get_all()) == 4
-    assert task_7.id == task_8.id
-    task_9 = _create_task_from_config(task_config_2, None, None)  # Do not create. It already exists for None cycle
-    assert len(_TaskManager._get_all()) == 4
-    assert task_7.id == task_8.id
-    assert task_8.id == task_9.id
-    task_10 = _create_task_from_config(
-        task_config_2, None, "scenario"
-    )  # Do not create. It already exists for None cycle
-    assert len(_TaskManager._get_all()) == 4
-    assert task_7.id == task_8.id
-    assert task_8.id == task_9.id
-    assert task_9.id == task_10.id
-    task_11 = _create_task_from_config(
-        task_config_2, None, "scenario"
-    )  # Do not create. It already exists for None cycle
-    assert len(_TaskManager._get_all()) == 4
-    assert task_7.id == task_8.id
-    assert task_8.id == task_9.id
-    assert task_9.id == task_10.id
-    assert task_10.id == task_11.id
-    task_12 = _create_task_from_config(task_config_2, "cycle", None)
-    assert len(_TaskManager._get_all()) == 5
-    assert task_7.id == task_8.id
-    assert task_8.id == task_9.id
-    assert task_9.id == task_10.id
-    assert task_10.id == task_11.id
-    assert task_11.id != task_12.id
-    task_13 = _create_task_from_config(task_config_2, "cycle", None)
-    assert len(_TaskManager._get_all()) == 5
-    assert task_7.id == task_8.id
-    assert task_8.id == task_9.id
-    assert task_9.id == task_10.id
-    assert task_10.id == task_11.id
-    assert task_11.id != task_12.id
-    assert task_12.id == task_13.id
-
-
-def test_set_and_get_task(init_sql_repo):
-    task_id_1 = TaskId("id1")
-    first_task = Task("name_1", {}, print, [], [], task_id_1)
-    task_id_2 = TaskId("id2")
-    second_task = Task("name_2", {}, print, [], [], task_id_2)
-    third_task_with_same_id_as_first_task = Task("name_is_not_1_anymore", {}, print, [], [], task_id_1)
-
-    # No task at initialization
-
-    assert len(_TaskManager._get_all()) == 0
-    assert _TaskManager._get(task_id_1) is None
-    assert _TaskManager._get(first_task) is None
-    assert _TaskManager._get(task_id_2) is None
-    assert _TaskManager._get(second_task) is None
-
-    # Save one task. We expect to have only one task stored
-    _TaskManager._set(first_task)
-    assert len(_TaskManager._get_all()) == 1
-    assert _TaskManager._get(task_id_1).id == first_task.id
-    assert _TaskManager._get(first_task).id == first_task.id
-    assert _TaskManager._get(task_id_2) is None
-    assert _TaskManager._get(second_task) is None
-
-    # Save a second task. Now, we expect to have a total of two tasks stored
-    _TaskManager._set(second_task)
-    assert len(_TaskManager._get_all()) == 2
-    assert _TaskManager._get(task_id_1).id == first_task.id
-    assert _TaskManager._get(first_task).id == first_task.id
-    assert _TaskManager._get(task_id_2).id == second_task.id
-    assert _TaskManager._get(second_task).id == second_task.id
-
-    # We save the first task again. We expect nothing to change
-    _TaskManager._set(first_task)
-    assert len(_TaskManager._get_all()) == 2
-    assert _TaskManager._get(task_id_1).id == first_task.id
-    assert _TaskManager._get(first_task).id == first_task.id
-    assert _TaskManager._get(task_id_2).id == second_task.id
-    assert _TaskManager._get(second_task).id == second_task.id
-
-    # We save a third task with same id as the first one.
-    # We expect the first task to be updated
-    _TaskManager._set(third_task_with_same_id_as_first_task)
-    assert len(_TaskManager._get_all()) == 2
-    assert _TaskManager._get(task_id_1).id == third_task_with_same_id_as_first_task.id
-    assert _TaskManager._get(task_id_1).config_id == third_task_with_same_id_as_first_task.config_id
-    assert _TaskManager._get(first_task).id == third_task_with_same_id_as_first_task.id
-    assert _TaskManager._get(task_id_2).id == second_task.id
-    assert _TaskManager._get(second_task).id == second_task.id
-
-
-def test_get_all_on_multiple_versions_environment(init_sql_repo):
-    # Create 5 tasks with 2 versions each
-    # Only version 1.0 has the task with config_id = "config_id_1"
-    # Only version 2.0 has the task with config_id = "config_id_6"
-    for version in range(1, 3):
-        for i in range(5):
-            _TaskManager._set(
-                Task(
-                    f"config_id_{i+version}", {}, print, [], [], id=TaskId(f"id{i}_v{version}"), version=f"{version}.0"
-                )
-            )
-
-    _VersionManager._set_experiment_version("1.0")
-    assert len(_TaskManager._get_all()) == 5
-    assert len(_TaskManager._get_all_by(filters=[{"version": "1.0", "config_id": "config_id_1"}])) == 1
-    assert len(_TaskManager._get_all_by(filters=[{"version": "1.0", "config_id": "config_id_6"}])) == 0
-
-    _VersionManager._set_experiment_version("2.0")
-    assert len(_TaskManager._get_all()) == 5
-    assert len(_TaskManager._get_all_by(filters=[{"version": "2.0", "config_id": "config_id_1"}])) == 0
-    assert len(_TaskManager._get_all_by(filters=[{"version": "2.0", "config_id": "config_id_6"}])) == 1
-
-    _VersionManager._set_development_version("1.0")
-    assert len(_TaskManager._get_all()) == 5
-    assert len(_TaskManager._get_all_by(filters=[{"version": "1.0", "config_id": "config_id_1"}])) == 1
-    assert len(_TaskManager._get_all_by(filters=[{"version": "1.0", "config_id": "config_id_6"}])) == 0
-
-    _VersionManager._set_development_version("2.0")
-    assert len(_TaskManager._get_all()) == 5
-    assert len(_TaskManager._get_all_by(filters=[{"version": "2.0", "config_id": "config_id_1"}])) == 0
-    assert len(_TaskManager._get_all_by(filters=[{"version": "2.0", "config_id": "config_id_6"}])) == 1
-
-
-def test_ensure_conservation_of_order_of_data_nodes_on_task_creation(init_sql_repo):
-    embedded_1 = Config.configure_data_node("dn_1", "in_memory", scope=Scope.SCENARIO)
-    embedded_2 = Config.configure_data_node("dn_2", "in_memory", scope=Scope.SCENARIO)
-    embedded_3 = Config.configure_data_node("a_dn_3", "in_memory", scope=Scope.SCENARIO)
-    embedded_4 = Config.configure_data_node("dn_4", "in_memory", scope=Scope.SCENARIO)
-    embedded_5 = Config.configure_data_node("dn_5", "in_memory", scope=Scope.SCENARIO)
-
-    input = [embedded_1, embedded_2, embedded_3]
-    output = [embedded_4, embedded_5]
-
-    task_config_1 = Config.configure_task("name_1", print, input, output)
-    task_config_2 = Config.configure_task("name_2", print, input, output)
-
-    task_1, task_2 = _TaskManager._bulk_get_or_create([task_config_1, task_config_2])
-
-    assert [i.config_id for i in task_1.input.values()] == [embedded_1.id, embedded_2.id, embedded_3.id]
-    assert [o.config_id for o in task_1.output.values()] == [embedded_4.id, embedded_5.id]
-
-    assert [i.config_id for i in task_2.input.values()] == [embedded_1.id, embedded_2.id, embedded_3.id]
-    assert [o.config_id for o in task_2.output.values()] == [embedded_4.id, embedded_5.id]
-
-
-def test_delete_raise_exception(init_sql_repo):
-    dn_input_config_1 = Config.configure_data_node(
-        "my_input_1", "in_memory", scope=Scope.SCENARIO, default_data="testing"
-    )
-    dn_output_config_1 = Config.configure_data_node("my_output_1", "in_memory")
-    task_config_1 = Config.configure_task("task_config_1", print, dn_input_config_1, dn_output_config_1)
-    task_1 = _create_task_from_config(task_config_1)
-    _TaskManager._delete(task_1.id)
-
-    with pytest.raises(ModelNotFound):
-        _TaskManager._delete(task_1.id)
-
-
-def test_hard_delete(init_sql_repo):
-    dn_input_config_1 = Config.configure_data_node(
-        "my_input_1", "in_memory", scope=Scope.SCENARIO, default_data="testing"
-    )
-    dn_output_config_1 = Config.configure_data_node("my_output_1", "in_memory")
-    task_config_1 = Config.configure_task("task_config_1", print, dn_input_config_1, dn_output_config_1)
-    task_1 = _create_task_from_config(task_config_1)
-
-    assert len(_TaskManager._get_all()) == 1
-    assert len(_DataManager._get_all()) == 2
-    _TaskManager._hard_delete(task_1.id)
-    assert len(_TaskManager._get_all()) == 0
-    assert len(_DataManager._get_all()) == 2
-
-
-def test_submit_task():
-    data_node_1 = InMemoryDataNode("foo", Scope.SCENARIO, "s1")
-    data_node_2 = InMemoryDataNode("bar", Scope.SCENARIO, "s2")
-    task_1 = Task(
-        "grault",
-        {},
-        print,
-        [data_node_1],
-        [data_node_2],
-        TaskId("t1"),
-    )
-
-    class MockOrchestrator(_Orchestrator):
-        submit_calls = []
-        submit_ids = []
-
-        def submit_task(self, task, callbacks=None, force=False, wait=False, timeout=None):
-            submit_id = f"SUBMISSION_{str(uuid.uuid4())}"
-            self.submit_calls.append(task)
-            self.submit_ids.append(submit_id)
-            return None
-
-    with mock.patch("taipy.core.task._task_manager._TaskManager._orchestrator", new=MockOrchestrator):
-        # Task does not exist, we expect an exception
-        with pytest.raises(NonExistingTask):
-            _TaskManager._submit(task_1)
-        with pytest.raises(NonExistingTask):
-            _TaskManager._submit(task_1.id)
-
-        _TaskManager._set(task_1)
-        _TaskManager._submit(task_1)
-        call_ids = [call.id for call in MockOrchestrator.submit_calls]
-        assert call_ids == [task_1.id]
-        assert len(MockOrchestrator.submit_ids) == 1
-
-        _TaskManager._submit(task_1)
-        assert len(MockOrchestrator.submit_ids) == 2
-        assert len(MockOrchestrator.submit_ids) == len(set(MockOrchestrator.submit_ids))
-
-        _TaskManager._submit(task_1)
-        assert len(MockOrchestrator.submit_ids) == 3
-        assert len(MockOrchestrator.submit_ids) == len(set(MockOrchestrator.submit_ids))
-
-
-def test_get_tasks_by_config_id(init_sql_repo):
-    dn_config = Config.configure_data_node("dn", scope=Scope.SCENARIO)
-    task_config_1 = Config.configure_task("t1", print, dn_config)
-    task_config_2 = Config.configure_task("t2", print, dn_config)
-    task_config_3 = Config.configure_task("t3", print, dn_config)
-
-    t_1_1 = _TaskManager._bulk_get_or_create([task_config_1], scenario_id="scenario_1")[0]
-    t_1_2 = _TaskManager._bulk_get_or_create([task_config_1], scenario_id="scenario_2")[0]
-    t_1_3 = _TaskManager._bulk_get_or_create([task_config_1], scenario_id="scenario_3")[0]
-    assert len(_TaskManager._get_all()) == 3
-
-    t_2_1 = _TaskManager._bulk_get_or_create([task_config_2], scenario_id="scenario_4")[0]
-    t_2_2 = _TaskManager._bulk_get_or_create([task_config_2], scenario_id="scenario_5")[0]
-    assert len(_TaskManager._get_all()) == 5
-
-    t_3_1 = _TaskManager._bulk_get_or_create([task_config_3], scenario_id="scenario_6")[0]
-    assert len(_TaskManager._get_all()) == 6
-
-    t1_tasks = _TaskManager._get_by_config_id(task_config_1.id)
-    assert len(t1_tasks) == 3
-    assert sorted([t_1_1.id, t_1_2.id, t_1_3.id]) == sorted([task.id for task in t1_tasks])
-
-    t2_tasks = _TaskManager._get_by_config_id(task_config_2.id)
-    assert len(t2_tasks) == 2
-    assert sorted([t_2_1.id, t_2_2.id]) == sorted([task.id for task in t2_tasks])
-
-    t3_tasks = _TaskManager._get_by_config_id(task_config_3.id)
-    assert len(t3_tasks) == 1
-    assert sorted([t_3_1.id]) == sorted([task.id for task in t3_tasks])
-
-
-def test_get_scenarios_by_config_id_in_multiple_versions_environment(init_sql_repo):
-    dn_config = Config.configure_data_node("dn", scope=Scope.SCENARIO)
-    task_config_1 = Config.configure_task("t1", print, dn_config)
-    task_config_2 = Config.configure_task("t2", print, dn_config)
-
-    _VersionManager._set_experiment_version("1.0")
-    _TaskManager._bulk_get_or_create([task_config_1], scenario_id="scenario_1")[0]
-    _TaskManager._bulk_get_or_create([task_config_1], scenario_id="scenario_2")[0]
-    _TaskManager._bulk_get_or_create([task_config_1], scenario_id="scenario_3")[0]
-    _TaskManager._bulk_get_or_create([task_config_2], scenario_id="scenario_4")[0]
-    _TaskManager._bulk_get_or_create([task_config_2], scenario_id="scenario_5")[0]
-
-    assert len(_TaskManager._get_by_config_id(task_config_1.id)) == 3
-    assert len(_TaskManager._get_by_config_id(task_config_2.id)) == 2
-
-    _VersionManager._set_experiment_version("2.0")
-    _TaskManager._bulk_get_or_create([task_config_1], scenario_id="scenario_1")[0]
-    _TaskManager._bulk_get_or_create([task_config_1], scenario_id="scenario_2")[0]
-    _TaskManager._bulk_get_or_create([task_config_1], scenario_id="scenario_3")[0]
-    _TaskManager._bulk_get_or_create([task_config_2], scenario_id="scenario_4")[0]
-    _TaskManager._bulk_get_or_create([task_config_2], scenario_id="scenario_5")[0]
-
-    assert len(_TaskManager._get_by_config_id(task_config_1.id)) == 3
-    assert len(_TaskManager._get_by_config_id(task_config_2.id)) == 2
-
-
-def _create_task_from_config(task_config, *args, **kwargs):
-    return _TaskManager._bulk_get_or_create([task_config], *args, **kwargs)[0]

+ 29 - 55
tests/core/task/test_task_repositories.py

@@ -13,21 +13,15 @@ import os
 
 import pytest
 
-from taipy.config.config import Config
 from taipy.core.data._data_fs_repository import _DataFSRepository
-from taipy.core.data._data_sql_repository import _DataSQLRepository
 from taipy.core.exceptions import ModelNotFound
 from taipy.core.task._task_fs_repository import _TaskFSRepository
-from taipy.core.task._task_sql_repository import _TaskSQLRepository
 from taipy.core.task.task import Task, TaskId
 
 
 class TestTaskFSRepository:
-    @pytest.mark.parametrize("repo", [(_TaskFSRepository, _DataFSRepository), (_TaskSQLRepository, _DataSQLRepository)])
-    def test_save_and_load(self, data_node, repo, tmp_sqlite):
-        if repo[1] == _DataSQLRepository:
-            Config.configure_core(repository_type="sql", repository_properties={"db_location": tmp_sqlite})
-        task_repository, data_repository = repo[0](), repo[1]()
+    def test_save_and_load(self, data_node):
+        task_repository, data_repository = _TaskFSRepository(), _DataFSRepository()
         data_repository._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
 
@@ -46,11 +40,8 @@ class TestTaskFSRepository:
         assert task._skippable == loaded_task._skippable
         assert task._properties == loaded_task._properties
 
-    @pytest.mark.parametrize("repo", [(_TaskFSRepository, _DataFSRepository), (_TaskSQLRepository, _DataSQLRepository)])
-    def test_exists(self, data_node, repo, tmp_sqlite):
-        if repo[1] == _DataSQLRepository:
-            Config.configure_core(repository_type="sql", repository_properties={"db_location": tmp_sqlite})
-        task_repository, data_repository = repo[0](), repo[1]()
+    def test_exists(self, data_node):
+        task_repository, data_repository = _TaskFSRepository(), _DataFSRepository()
         data_repository._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
 
@@ -59,11 +50,8 @@ class TestTaskFSRepository:
         assert task_repository._exists(task.id)
         assert not task_repository._exists("not-existed-task")
 
-    @pytest.mark.parametrize("repo", [(_TaskFSRepository, _DataFSRepository), (_TaskSQLRepository, _DataSQLRepository)])
-    def test_load_all(self, data_node, repo, tmp_sqlite):
-        if repo[1] == _DataSQLRepository:
-            Config.configure_core(repository_type="sql", repository_properties={"db_location": tmp_sqlite})
-        task_repository, data_repository = repo[0](), repo[1]()
+    def test_load_all(self, data_node):
+        task_repository, data_repository = _TaskFSRepository(), _DataFSRepository()
         data_repository._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
 
@@ -74,11 +62,8 @@ class TestTaskFSRepository:
 
         assert len(data_nodes) == 10
 
-    @pytest.mark.parametrize("repo", [(_TaskFSRepository, _DataFSRepository), (_TaskSQLRepository, _DataSQLRepository)])
-    def test_load_all_with_filters(self, data_node, repo, tmp_sqlite):
-        if repo[1] == _DataSQLRepository:
-            Config.configure_core(repository_type="sql", repository_properties={"db_location": tmp_sqlite})
-        task_repository, data_repository = repo[0](), repo[1]()
+    def test_load_all_with_filters(self, data_node):
+        task_repository, data_repository = _TaskFSRepository(), _DataFSRepository()
         data_repository._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
 
@@ -90,11 +75,8 @@ class TestTaskFSRepository:
 
         assert len(objs) == 1
 
-    @pytest.mark.parametrize("repo", [(_TaskFSRepository, _DataFSRepository), (_TaskSQLRepository, _DataSQLRepository)])
-    def test_delete(self, data_node, repo, tmp_sqlite):
-        if repo[1] == _DataSQLRepository:
-            Config.configure_core(repository_type="sql", repository_properties={"db_location": tmp_sqlite})
-        task_repository, data_repository = repo[0](), repo[1]()
+    def test_delete(self, data_node):
+        task_repository, data_repository = _TaskFSRepository(), _DataFSRepository()
         data_repository._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
         task_repository._save(task)
@@ -104,11 +86,8 @@ class TestTaskFSRepository:
         with pytest.raises(ModelNotFound):
             task_repository._load(task.id)
 
-    @pytest.mark.parametrize("repo", [(_TaskFSRepository, _DataFSRepository), (_TaskSQLRepository, _DataSQLRepository)])
-    def test_delete_all(self, data_node, repo, tmp_sqlite):
-        if repo[1] == _DataSQLRepository:
-            Config.configure_core(repository_type="sql", repository_properties={"db_location": tmp_sqlite})
-        task_repository, data_repository = repo[0](), repo[1]()
+    def test_delete_all(self, data_node):
+        task_repository, data_repository = _TaskFSRepository(), _DataFSRepository()
         data_repository._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
 
@@ -122,11 +101,8 @@ class TestTaskFSRepository:
 
         assert len(task_repository._load_all()) == 0
 
-    @pytest.mark.parametrize("repo", [(_TaskFSRepository, _DataFSRepository), (_TaskSQLRepository, _DataSQLRepository)])
-    def test_delete_many(self, data_node, repo, tmp_sqlite):
-        if repo[1] == _DataSQLRepository:
-            Config.configure_core(repository_type="sql", repository_properties={"db_location": tmp_sqlite})
-        task_repository, data_repository = repo[0](), repo[1]()
+    def test_delete_many(self, data_node):
+        task_repository, data_repository = _TaskFSRepository(), _DataFSRepository()
         data_repository._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
 
@@ -141,11 +117,8 @@ class TestTaskFSRepository:
 
         assert len(task_repository._load_all()) == 7
 
-    @pytest.mark.parametrize("repo", [(_TaskFSRepository, _DataFSRepository), (_TaskSQLRepository, _DataSQLRepository)])
-    def test_delete_by(self, data_node, repo, tmp_sqlite):
-        if repo[1] == _DataSQLRepository:
-            Config.configure_core(repository_type="sql", repository_properties={"db_location": tmp_sqlite})
-        task_repository, data_repository = repo[0](), repo[1]()
+    def test_delete_by(self, data_node):
+        task_repository, data_repository = _TaskFSRepository(), _DataFSRepository()
         data_repository._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
 
@@ -161,13 +134,17 @@ class TestTaskFSRepository:
 
         assert len(task_repository._load_all()) == 5
 
-    @pytest.mark.parametrize("repo", [(_TaskFSRepository, _DataFSRepository), (_TaskSQLRepository, _DataSQLRepository)])
-    def test_search(self, data_node, repo, tmp_sqlite):
-        if repo[1] == _DataSQLRepository:
-            Config.configure_core(repository_type="sql", repository_properties={"db_location": tmp_sqlite})
-        task_repository, data_repository = repo[0](), repo[1]()
+    def test_search(self, data_node):
+        task_repository, data_repository = _TaskFSRepository(), _DataFSRepository()
         data_repository._save(data_node)
-        task = Task("task_config_id", {}, print, [data_node], [data_node], version="random_version_number")
+        task = Task(
+            "task_config_id",
+            {},
+            print,
+            [data_node],
+            [data_node],
+            version="random_version_number",
+        )
 
         for i in range(10):
             task.id = TaskId(f"task-{i}")
@@ -186,16 +163,13 @@ class TestTaskFSRepository:
 
         assert task_repository._search("owner_id", "owner-2", filters=[{"version": "non_existed_version"}]) == []
 
-    @pytest.mark.parametrize("repo", [(_TaskFSRepository, _DataFSRepository), (_TaskSQLRepository, _DataSQLRepository)])
-    def test_export(self, tmpdir, data_node, repo, tmp_sqlite):
-        if repo[1] == _DataSQLRepository:
-            Config.configure_core(repository_type="sql", repository_properties={"db_location": tmp_sqlite})
-        task_repository, data_repository = repo[0](), repo[1]()
+    def test_export(self, tmpdir, data_node):
+        task_repository, data_repository = _TaskFSRepository(), _DataFSRepository()
         data_repository._save(data_node)
         task = Task("task_config_id", {}, print, [data_node], [data_node])
         task_repository._save(task)
 
         task_repository._export(task.id, tmpdir.strpath)
-        dir_path = task_repository.dir_path if repo[0] == _TaskFSRepository else os.path.join(tmpdir.strpath, "task")
+        dir_path = task_repository.dir_path
 
         assert os.path.exists(os.path.join(dir_path, f"{task.id}.json"))

+ 0 - 567
tests/core/test_core_cli_with_sql_repo.py

@@ -1,567 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-from unittest.mock import patch
-
-import pytest
-
-from taipy.config.common.frequency import Frequency
-from taipy.config.common.scope import Scope
-from taipy.config.config import Config
-from taipy.core import Core
-from taipy.core._version._version_manager import _VersionManager
-from taipy.core._version._version_manager_factory import _VersionManagerFactory
-from taipy.core.common._utils import _load_fct
-from taipy.core.cycle._cycle_manager import _CycleManager
-from taipy.core.data._data_manager import _DataManager
-from taipy.core.exceptions.exceptions import NonExistingVersion
-from taipy.core.job._job_manager import _JobManager
-from taipy.core.scenario._scenario_manager import _ScenarioManager
-from taipy.core.sequence._sequence_manager import _SequenceManager
-from taipy.core.task._task_manager import _TaskManager
-
-
-def test_core_cli_no_arguments(init_sql_repo):
-    with patch("sys.argv", ["prog"]):
-        core = Core()
-        core.run()
-        assert Config.core.mode == "development"
-        assert Config.core.version_number == _VersionManagerFactory._build_manager()._get_development_version()
-        assert not Config.core.force
-        core.stop()
-
-
-def test_core_cli_development_mode(init_sql_repo):
-    with patch("sys.argv", ["prog", "--development"]):
-        core = Core()
-        core.run()
-        assert Config.core.mode == "development"
-        assert Config.core.version_number == _VersionManagerFactory._build_manager()._get_development_version()
-        core.stop()
-
-
-def test_core_cli_dev_mode(init_sql_repo):
-    with patch("sys.argv", ["prog", "-dev"]):
-        core = Core()
-        core.run()
-        assert Config.core.mode == "development"
-        assert Config.core.version_number == _VersionManagerFactory._build_manager()._get_development_version()
-        core.stop()
-
-
-def test_core_cli_experiment_mode(init_sql_repo):
-    with patch("sys.argv", ["prog", "--experiment"]):
-        core = Core()
-        core.run()
-        assert Config.core.mode == "experiment"
-        assert Config.core.version_number == _VersionManagerFactory._build_manager()._get_latest_version()
-        assert not Config.core.force
-        core.stop()
-
-
-def test_core_cli_experiment_mode_with_version(init_sql_repo):
-    with patch("sys.argv", ["prog", "--experiment", "2.1"]):
-        core = Core()
-        core.run()
-        assert Config.core.mode == "experiment"
-        assert Config.core.version_number == "2.1"
-        assert not Config.core.force
-        core.stop()
-
-
-def test_core_cli_experiment_mode_with_force_version(init_sql_repo):
-    with patch("sys.argv", ["prog", "--experiment", "2.1", "--taipy-force"]):
-        core = Core()
-        core.run()
-        assert Config.core.mode == "experiment"
-        assert Config.core.version_number == "2.1"
-        assert Config.core.force
-        core.stop()
-
-
-def test_core_cli_production_mode(init_sql_repo):
-    with patch("sys.argv", ["prog", "--production"]):
-        core = Core()
-        core.run()
-        assert Config.core.mode == "production"
-        assert Config.core.version_number == _VersionManagerFactory._build_manager()._get_latest_version()
-        assert not Config.core.force
-        core.stop()
-
-
-def test_dev_mode_clean_all_entities_of_the_latest_version(init_sql_repo):
-    scenario_config = config_scenario()
-
-    # Create a scenario in development mode
-    with patch("sys.argv", ["prog"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-        # Initial assertion
-        assert len(_DataManager._get_all(version_number="all")) == 2
-        assert len(_TaskManager._get_all(version_number="all")) == 1
-        assert len(_SequenceManager._get_all(version_number="all")) == 1
-        assert len(_ScenarioManager._get_all(version_number="all")) == 1
-        assert len(_CycleManager._get_all(version_number="all")) == 1
-        assert len(_JobManager._get_all(version_number="all")) == 1
-
-    # Create a new scenario in experiment mode
-    with patch("sys.argv", ["prog", "--experiment"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-        # Assert number of entities in 2nd version
-        assert len(_DataManager._get_all(version_number="all")) == 4
-        assert len(_TaskManager._get_all(version_number="all")) == 2
-        assert len(_SequenceManager._get_all(version_number="all")) == 2
-        assert len(_ScenarioManager._get_all(version_number="all")) == 2
-        assert (
-            len(_CycleManager._get_all(version_number="all")) == 1
-        )  # No new cycle is created since old dev version use the same cycle
-        assert len(_JobManager._get_all(version_number="all")) == 2
-
-    # Run development mode again
-    with patch("sys.argv", ["prog", "--development"]):
-        core = Core()
-        core.run()
-
-        # The 1st dev version should be deleted run with development mode
-        assert len(_DataManager._get_all(version_number="all")) == 2
-        assert len(_TaskManager._get_all(version_number="all")) == 1
-        assert len(_SequenceManager._get_all(version_number="all")) == 1
-        assert len(_ScenarioManager._get_all(version_number="all")) == 1
-        assert len(_CycleManager._get_all(version_number="all")) == 1
-        assert len(_JobManager._get_all(version_number="all")) == 1
-
-        # Submit new dev version
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-
-        # Assert number of entities with 1 dev version and 1 exp version
-        assert len(_DataManager._get_all(version_number="all")) == 4
-        assert len(_TaskManager._get_all(version_number="all")) == 2
-        assert len(_SequenceManager._get_all(version_number="all")) == 2
-        assert len(_ScenarioManager._get_all(version_number="all")) == 2
-        assert len(_CycleManager._get_all(version_number="all")) == 1
-        assert len(_JobManager._get_all(version_number="all")) == 2
-
-        # Assert number of entities of the latest version only
-        assert len(_DataManager._get_all(version_number="latest")) == 2
-        assert len(_TaskManager._get_all(version_number="latest")) == 1
-        assert len(_SequenceManager._get_all(version_number="latest")) == 1
-        assert len(_ScenarioManager._get_all(version_number="latest")) == 1
-        assert len(_JobManager._get_all(version_number="latest")) == 1
-
-        # Assert number of entities of the development version only
-        assert len(_DataManager._get_all(version_number="development")) == 2
-        assert len(_TaskManager._get_all(version_number="development")) == 1
-        assert len(_SequenceManager._get_all(version_number="development")) == 1
-        assert len(_ScenarioManager._get_all(version_number="development")) == 1
-        assert len(_JobManager._get_all(version_number="development")) == 1
-
-        # Assert number of entities of an unknown version
-        with pytest.raises(NonExistingVersion):
-            assert _DataManager._get_all(version_number="foo")
-        with pytest.raises(NonExistingVersion):
-            assert _TaskManager._get_all(version_number="foo")
-        with pytest.raises(NonExistingVersion):
-            assert _SequenceManager._get_all(version_number="foo")
-        with pytest.raises(NonExistingVersion):
-            assert _ScenarioManager._get_all(version_number="foo")
-        with pytest.raises(NonExistingVersion):
-            assert _JobManager._get_all(version_number="foo")
-        core.stop()
-
-
-def twice_doppelganger(a):
-    return a * 2
-
-
-def test_dev_mode_clean_all_entities_when_config_is_alternated(init_sql_repo):
-    data_node_1_config = Config.configure_data_node(
-        id="d1", storage_type="pickle", default_data="abc", scope=Scope.SCENARIO
-    )
-    data_node_2_config = Config.configure_data_node(id="d2", storage_type="csv", default_path="foo.csv")
-    task_config = Config.configure_task("my_task", twice_doppelganger, data_node_1_config, data_node_2_config)
-    scenario_config = Config.configure_scenario("my_scenario", [task_config], frequency=Frequency.DAILY)
-
-    # Create a scenario in development mode with the doppelganger function
-    with patch("sys.argv", ["prog"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-    # Delete the twice_doppelganger function
-    # and clear cache of _load_fct() to simulate a new run
-    del globals()["twice_doppelganger"]
-    _load_fct.cache_clear()
-
-    # Create a scenario in development mode with another function
-    scenario_config = config_scenario()
-    with patch("sys.argv", ["prog"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-
-def test_version_number_when_switching_mode(init_sql_repo):
-    with patch("sys.argv", ["prog", "--development"]):
-        core = Core()
-        core.run()
-        ver_1 = _VersionManager._get_latest_version()
-        ver_dev = _VersionManager._get_development_version()
-        assert ver_1 == ver_dev
-        assert len(_VersionManager._get_all()) == 1
-        core.stop()
-
-    # Run with dev mode, the version number is the same
-    with patch("sys.argv", ["prog", "--development"]):
-        core = Core()
-        core.run()
-        ver_2 = _VersionManager._get_latest_version()
-        assert ver_2 == ver_dev
-        assert len(_VersionManager._get_all()) == 1
-        core.stop()
-
-    # When run with experiment mode, a new version is created
-    with patch("sys.argv", ["prog", "--experiment"]):
-        core = Core()
-        core.run()
-        ver_3 = _VersionManager._get_latest_version()
-        assert ver_3 != ver_dev
-        assert len(_VersionManager._get_all()) == 2
-        core.stop()
-
-    with patch("sys.argv", ["prog", "--experiment", "2.1"]):
-        core = Core()
-        core.run()
-        ver_4 = _VersionManager._get_latest_version()
-        assert ver_4 == "2.1"
-        assert len(_VersionManager._get_all()) == 3
-        core.stop()
-
-    with patch("sys.argv", ["prog", "--experiment"]):
-        core = Core()
-        core.run()
-        ver_5 = _VersionManager._get_latest_version()
-        assert ver_5 != ver_3
-        assert ver_5 != ver_4
-        assert ver_5 != ver_dev
-        assert len(_VersionManager._get_all()) == 4
-        core.stop()
-
-    # When run with production mode, the latest version is used as production
-    with patch("sys.argv", ["prog", "--production"]):
-        core = Core()
-        core.run()
-        ver_6 = _VersionManager._get_latest_version()
-        production_versions = _VersionManager._get_production_versions()
-        assert ver_6 == ver_5
-        assert production_versions == [ver_6]
-        assert len(_VersionManager._get_all()) == 4
-        core.stop()
-
-    # When run with production mode, the "2.1" version is used as production
-    with patch("sys.argv", ["prog", "--production", "2.1"]):
-        core = Core()
-        core.run()
-        ver_7 = _VersionManager._get_latest_version()
-        production_versions = _VersionManager._get_production_versions()
-        assert ver_7 == "2.1"
-        assert production_versions == [ver_7, ver_6]
-        assert len(_VersionManager._get_all()) == 4
-        core.stop()
-
-    # Run with dev mode, the version number is the same as the first dev version to override it
-    with patch("sys.argv", ["prog", "--development"]):
-        core = Core()
-        core.run()
-        ver_7 = _VersionManager._get_latest_version()
-        assert ver_1 == ver_7
-        assert len(_VersionManager._get_all()) == 4
-        core.stop()
-
-
-def test_production_mode_load_all_entities_from_previous_production_version(init_sql_repo):
-    scenario_config = config_scenario()
-
-    with patch("sys.argv", ["prog", "--development"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-    with patch("sys.argv", ["prog", "--production", "1.0"]):
-        core = Core()
-        core.run()
-        production_ver_1 = _VersionManager._get_latest_version()
-        assert _VersionManager._get_production_versions() == [production_ver_1]
-        # When run production mode on a new app, a dev version is created alongside
-        assert _VersionManager._get_development_version() not in _VersionManager._get_production_versions()
-        assert len(_VersionManager._get_all()) == 2
-
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-
-        assert len(_DataManager._get_all()) == 2
-        assert len(_TaskManager._get_all()) == 1
-        assert len(_SequenceManager._get_all()) == 1
-        assert len(_ScenarioManager._get_all()) == 1
-        assert len(_CycleManager._get_all()) == 1
-        assert len(_JobManager._get_all()) == 1
-        core.stop()
-
-    with patch("sys.argv", ["prog", "--production", "2.0"]):
-        core = Core()
-        core.run()
-        production_ver_2 = _VersionManager._get_latest_version()
-        assert _VersionManager._get_production_versions() == [production_ver_1, production_ver_2]
-        assert len(_VersionManager._get_all()) == 3
-
-        # All entities from previous production version should be saved
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-
-        assert len(_DataManager._get_all()) == 4
-        assert len(_TaskManager._get_all()) == 2
-        assert len(_SequenceManager._get_all()) == 2
-        assert len(_ScenarioManager._get_all()) == 2
-        assert len(_CycleManager._get_all()) == 1
-        assert len(_JobManager._get_all()) == 2
-        core.stop()
-
-
-def test_force_override_experiment_version(init_sql_repo):
-    scenario_config = config_scenario()
-
-    with patch("sys.argv", ["prog", "--experiment", "1.0"]):
-        core = Core()
-        core.run()
-        ver_1 = _VersionManager._get_latest_version()
-        assert ver_1 == "1.0"
-        # When create new experiment version, a development version entity is also created as a placeholder
-        assert len(_VersionManager._get_all()) == 2  # 2 version include 1 experiment 1 development
-
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-
-        assert len(_DataManager._get_all()) == 2
-        assert len(_TaskManager._get_all()) == 1
-        assert len(_SequenceManager._get_all()) == 1
-        assert len(_ScenarioManager._get_all()) == 1
-        assert len(_CycleManager._get_all()) == 1
-        assert len(_JobManager._get_all()) == 1
-        core.stop()
-
-    Config.configure_global_app(foo="bar")
-
-    # Without --taipy-force parameter, a SystemExit will be raised
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "--experiment", "1.0"]):
-            core = Core()
-            core.run()
-    core.stop()
-
-    # With --taipy-force parameter
-    with patch("sys.argv", ["prog", "--experiment", "1.0", "--taipy-force"]):
-        core = Core()
-        core.run()
-        core.stop()
-    ver_2 = _VersionManager._get_latest_version()
-    assert ver_2 == "1.0"
-    assert len(_VersionManager._get_all()) == 2  # 2 version include 1 experiment 1 development
-
-    # All entities from previous submit should be saved
-    scenario = _ScenarioManager._create(scenario_config)
-    _ScenarioManager._submit(scenario)
-
-    assert len(_DataManager._get_all()) == 4
-    assert len(_TaskManager._get_all()) == 2
-    assert len(_SequenceManager._get_all()) == 2
-    assert len(_ScenarioManager._get_all()) == 2
-    assert len(_CycleManager._get_all()) == 1
-    assert len(_JobManager._get_all()) == 2
-
-
-def test_force_override_production_version(init_sql_repo):
-    scenario_config = config_scenario()
-
-    with patch("sys.argv", ["prog", "--production", "1.0"]):
-        core = Core()
-        core.run()
-        ver_1 = _VersionManager._get_latest_version()
-        production_versions = _VersionManager._get_production_versions()
-        assert ver_1 == "1.0"
-        assert production_versions == ["1.0"]
-        # When create new production version, a development version entity is also created as a placeholder
-        assert len(_VersionManager._get_all()) == 2  # 2 version include 1 production 1 development
-
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-
-        assert len(_DataManager._get_all()) == 2
-        assert len(_TaskManager._get_all()) == 1
-        assert len(_SequenceManager._get_all()) == 1
-        assert len(_ScenarioManager._get_all()) == 1
-        assert len(_CycleManager._get_all()) == 1
-        assert len(_JobManager._get_all()) == 1
-        core.stop()
-
-    Config.configure_global_app(foo="bar")
-
-    # Without --taipy-force parameter, a SystemExit will be raised
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "--production", "1.0"]):
-            core = Core()
-            core.run()
-    core.stop()
-
-    # With --taipy-force parameter
-    with patch("sys.argv", ["prog", "--production", "1.0", "--taipy-force"]):
-        core = Core()
-        core.run()
-        ver_2 = _VersionManager._get_latest_version()
-        assert ver_2 == "1.0"
-        assert len(_VersionManager._get_all()) == 2  # 2 version include 1 production 1 development
-
-        # All entities from previous submit should be saved
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-
-        assert len(_DataManager._get_all()) == 4
-        assert len(_TaskManager._get_all()) == 2
-        assert len(_SequenceManager._get_all()) == 2
-        assert len(_ScenarioManager._get_all()) == 2
-        assert len(_CycleManager._get_all()) == 1
-        assert len(_JobManager._get_all()) == 2
-        core.stop()
-
-
-def test_modify_config_properties_without_force(caplog, init_sql_repo, init_config):
-    _ = config_scenario()
-
-    with patch("sys.argv", ["prog", "--experiment", "1.0"]):
-        core = Core()
-        core.run()
-        core.stop()
-
-    init_config()
-    Config.configure_core(repository_type="sql", repository_properties={"db_location": init_sql_repo})
-
-    _ = config_scenario_2()
-
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "--experiment", "1.0"]):
-            core = Core()
-            core.run()
-    core.stop()
-
-    error_message = str(caplog.text)
-
-    assert 'DATA_NODE "d3" was added' in error_message
-    assert 'JOB "max_nb_of_workers" was added' in error_message
-
-    assert 'DATA_NODE "d0" was removed' in error_message
-
-    assert 'DATA_NODE "d2" has attribute "default_path" modified' in error_message
-    assert 'CORE "root_folder" was modified' in error_message
-    assert 'JOB "mode" was modified' in error_message
-    assert 'SCENARIO "my_scenario" has attribute "frequency" modified' in error_message
-    assert 'SCENARIO "my_scenario" has attribute "tasks" modified' in error_message
-    assert 'TASK "my_task" has attribute "inputs" modified' in error_message
-    assert 'TASK "my_task" has attribute "function" modified' in error_message
-    assert 'TASK "my_task" has attribute "outputs" modified' in error_message
-    assert 'DATA_NODE "d2" has attribute "has_header" modified' in error_message
-    assert 'DATA_NODE "d2" has attribute "exposed_type" modified' in error_message
-
-
-def test_modify_job_configuration_dont_stop_application(caplog, init_sql_repo, init_config):
-    _ = config_scenario()
-
-    with patch("sys.argv", ["prog", "--experiment", "1.0"]):
-        Config.configure_job_executions(mode="development")
-        core = Core()
-        core.run(force_restart=True)
-        core.stop()
-
-    init_config()
-    Config.configure_core(repository_type="sql", repository_properties={"db_location": init_sql_repo})
-
-    _ = config_scenario()
-
-    with patch("sys.argv", ["prog", "--experiment", "1.0"]):
-        Config.configure_job_executions(mode="standalone", max_nb_of_workers=3)
-        core = Core()
-        core.run(force_restart=True)
-        error_message = str(caplog.text)
-        assert 'JOB "mode" was modified' in error_message
-        assert 'JOB "max_nb_of_workers" was added' in error_message
-        core.stop()
-
-
-def twice(a):
-    return a * 2
-
-
-def config_scenario():
-    Config.configure_data_node(id="d0")
-    data_node_1_config = Config.configure_data_node(
-        id="d1", storage_type="pickle", default_data="abc", scope=Scope.SCENARIO
-    )
-    data_node_2_config = Config.configure_data_node(id="d2", storage_type="csv", default_path="foo.csv")
-    task_config = Config.configure_task("my_task", twice, data_node_1_config, data_node_2_config)
-    scenario_config = Config.configure_scenario("my_scenario", [task_config], frequency=Frequency.DAILY)
-    scenario_config.add_sequences({"my_sequence": [task_config]})
-
-    return scenario_config
-
-
-def double_twice(a):
-    return a * 2, a * 2
-
-
-def config_scenario_2():
-    Config.configure_core(
-        root_folder="foo_root",
-        # Changing the "storage_folder" will fail since older versions are stored in older folder
-        # storage_folder="foo_storage",
-    )
-    Config.configure_job_executions(mode="standalone", max_nb_of_workers=3)
-    data_node_1_config = Config.configure_data_node(
-        id="d1", storage_type="pickle", default_data="abc", scope=Scope.SCENARIO
-    )
-    # Modify properties of "d2"
-    data_node_2_config = Config.configure_data_node(
-        id="d2", storage_type="csv", default_path="bar.csv", has_header=False, exposed_type="numpy"
-    )
-    # Add new data node "d3"
-    data_node_3_config = Config.configure_data_node(
-        id="d3", storage_type="csv", default_path="baz.csv", has_header=False, exposed_type="numpy"
-    )
-    # Modify properties of "my_task", including the function and outputs list
-    Config.configure_task("my_task", double_twice, data_node_3_config, [data_node_1_config, data_node_2_config])
-    task_config_1 = Config.configure_task("my_task_1", double_twice, data_node_3_config, [data_node_2_config])
-    # Modify properties of "my_scenario", where tasks is now my_task_1
-    scenario_config = Config.configure_scenario("my_scenario", [task_config_1], frequency=Frequency.MONTHLY)
-    scenario_config.add_sequences({"my_sequence": [task_config_1]})
-
-    return scenario_config

+ 0 - 306
tests/core/version/test_version_cli_with_sql_repo.py

@@ -1,306 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-from time import sleep
-from unittest.mock import patch
-
-import pytest
-
-from taipy.config.common.frequency import Frequency
-from taipy.config.common.scope import Scope
-from taipy.config.config import Config
-from taipy.core import Core
-from taipy.core._version._cli._version_cli import _VersionCLI
-from taipy.core._version._version_manager import _VersionManager
-from taipy.core.data._data_manager import _DataManager
-from taipy.core.job._job_manager import _JobManager
-from taipy.core.scenario._scenario_manager import _ScenarioManager
-from taipy.core.scenario._scenario_manager_factory import _ScenarioManagerFactory
-from taipy.core.sequence._sequence_manager import _SequenceManager
-from taipy.core.task._task_manager import _TaskManager
-
-
-def test_delete_version(caplog, init_sql_repo):
-    _ScenarioManagerFactory._build_manager()
-
-    scenario_config = config_scenario()
-
-    with patch("sys.argv", ["prog", "--development"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-    with patch("sys.argv", ["prog", "--experiment", "1.0"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-    with patch("sys.argv", ["prog", "--experiment", "1.1"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-    with patch("sys.argv", ["prog", "--production", "1.1"]):
-        core = Core()
-        core.run()
-        core.stop()
-
-    with patch("sys.argv", ["prog", "--experiment", "2.0"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-    with patch("sys.argv", ["prog", "--experiment", "2.1"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-    with patch("sys.argv", ["prog", "--production", "2.1"]):
-        core = Core()
-        core.run()
-        core.stop()
-
-    all_versions = [version.id for version in _VersionManager._get_all()]
-    production_version = _VersionManager._get_production_versions()
-    assert len(all_versions) == 5
-    assert len(production_version) == 2
-    assert "1.0" in all_versions
-    assert "1.1" in all_versions and "1.1" in production_version
-    assert "2.0" in all_versions
-    assert "2.1" in all_versions and "2.1" in production_version
-
-    _VersionCLI.create_parser()
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "manage-versions", "--delete", "1.0"]):
-            _VersionCLI.handle_command()
-
-    assert "Successfully delete version 1.0." in caplog.text
-    all_versions = [version.id for version in _VersionManager._get_all()]
-    assert len(all_versions) == 4
-    assert "1.0" not in all_versions
-
-    # Test delete a non-existed version
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "manage-versions", "--delete", "non_exist_version"]):
-            _VersionCLI.handle_command()
-    assert "Version 'non_exist_version' does not exist." in caplog.text
-
-    # Test delete production version will change the version from production to experiment
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "manage-versions", "--delete-production", "1.1"]):
-            _VersionCLI.handle_command()
-
-    assert "Successfully delete version 1.1 from the production version list." in caplog.text
-    all_versions = [version.id for version in _VersionManager._get_all()]
-    production_version = _VersionManager._get_production_versions()
-    assert len(all_versions) == 4
-    assert "1.1" in all_versions and "1.1" not in production_version
-
-    # Test delete a non-existed production version
-    with pytest.raises(SystemExit) as e:
-        with patch("sys.argv", ["prog", "manage-versions", "--delete-production", "non_exist_version"]):
-            _VersionCLI.handle_command()
-
-    assert str(e.value) == "Version 'non_exist_version' is not a production version."
-
-
-def test_list_versions(capsys, init_sql_repo):
-    _ScenarioManagerFactory._build_manager()
-
-    with patch("sys.argv", ["prog", "--development"]):
-        core = Core()
-        core.run()
-        core.stop()
-    sleep(0.05)
-    with patch("sys.argv", ["prog", "--experiment", "1.0"]):
-        core = Core()
-        core.run()
-        core.stop()
-    sleep(0.05)
-    with patch("sys.argv", ["prog", "--experiment", "1.1"]):
-        core = Core()
-        core.run()
-        core.stop()
-    sleep(0.05)
-    with patch("sys.argv", ["prog", "--production", "1.1"]):
-        core = Core()
-        core.run()
-        core.stop()
-    sleep(0.05)
-    with patch("sys.argv", ["prog", "--experiment", "2.0"]):
-        core = Core()
-        core.run()
-        core.stop()
-    sleep(0.05)
-    with patch("sys.argv", ["prog", "--experiment", "2.1"]):
-        core = Core()
-        core.run()
-        core.stop()
-    sleep(0.05)
-    with patch("sys.argv", ["prog", "--production", "2.1"]):
-        core = Core()
-        core.run()
-        core.stop()
-
-    _VersionCLI.create_parser()
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "manage-versions", "--list"]):
-            _VersionCLI.handle_command()
-
-    out, _ = capsys.readouterr()
-    version_list = str(out).strip().split("\n")
-    assert len(version_list) == 6  # 5 versions with the header
-    assert all(column in version_list[0] for column in ["Version number", "Mode", "Creation date"])
-    assert all(column in version_list[1] for column in ["2.1", "Production", "latest"])
-    assert all(column in version_list[2] for column in ["2.0", "Experiment"]) and "latest" not in version_list[2]
-    assert all(column in version_list[3] for column in ["1.1", "Production"]) and "latest" not in version_list[3]
-    assert all(column in version_list[4] for column in ["1.0", "Experiment"]) and "latest" not in version_list[4]
-    assert "Development" in version_list[5] and "latest" not in version_list[5]
-
-
-def test_rename_version(caplog, init_sql_repo):
-    _ScenarioManagerFactory._build_manager()
-
-    scenario_config = config_scenario()
-
-    with patch("sys.argv", ["prog", "--experiment", "1.0"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-    with patch("sys.argv", ["prog", "--production", "2.0"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-    dev_ver = _VersionManager._get_development_version()
-
-    _VersionCLI.create_parser()
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "manage-versions", "--rename", "non_exist_version", "1.1"]):
-            # This should raise an exception since version "non_exist_version" does not exist
-            _VersionCLI.handle_command()
-    assert "Version 'non_exist_version' does not exist." in caplog.text
-
-    _VersionCLI.create_parser()
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "manage-versions", "--rename", "1.0", "2.0"]):
-            # This should raise an exception since 2.0 already exists
-            _VersionCLI.handle_command()
-    assert "Version name '2.0' is already used." in caplog.text
-
-    _VersionCLI.create_parser()
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "manage-versions", "--rename", "1.0", "1.1"]):
-            _VersionCLI.handle_command()
-    assert _VersionManager._get("1.0") is None
-    assert [version.id for version in _VersionManager._get_all()].sort() == [dev_ver, "1.1", "2.0"].sort()
-    # All entities are assigned to the new version
-    assert len(_DataManager._get_all("1.1")) == 2
-    assert len(_TaskManager._get_all("1.1")) == 1
-    assert len(_SequenceManager._get_all("1.1")) == 0
-    assert len(_ScenarioManager._get_all("1.1")) == 1
-    assert len(_JobManager._get_all("1.1")) == 1
-
-    _VersionCLI.create_parser()
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "manage-versions", "--rename", "2.0", "2.1"]):
-            _VersionCLI.handle_command()
-    assert _VersionManager._get("2.0") is None
-    assert [version.id for version in _VersionManager._get_all()].sort() == [dev_ver, "1.1", "2.1"].sort()
-    assert _VersionManager._get_production_versions() == ["2.1"]
-    # All entities are assigned to the new version
-    assert len(_DataManager._get_all("2.1")) == 2
-    assert len(_TaskManager._get_all("2.1")) == 1
-    assert len(_SequenceManager._get_all("2.1")) == 0
-    assert len(_ScenarioManager._get_all("2.1")) == 1
-    assert len(_JobManager._get_all("2.1")) == 1
-
-
-def test_compare_version_config(caplog, init_sql_repo, init_config):
-    _ScenarioManagerFactory._build_manager()
-
-    scenario_config_1 = config_scenario()
-
-    with patch("sys.argv", ["prog", "--experiment", "1.0"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config_1)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-    init_config()
-    Config.configure_core(repository_type="sql", repository_properties={"db_location": init_sql_repo})
-    _ScenarioManagerFactory._build_manager()
-
-    scenario_config_2 = config_scenario()
-    Config.configure_data_node(id="d2", storage_type="csv", default_path="bar.csv")
-
-    with patch("sys.argv", ["prog", "--experiment", "2.0"]):
-        core = Core()
-        core.run()
-        scenario = _ScenarioManager._create(scenario_config_2)
-        _ScenarioManager._submit(scenario)
-        core.stop()
-
-    _VersionCLI.create_parser()
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "manage-versions", "--compare-config", "non_exist_version", "2.0"]):
-            # This should raise an exception since version "non_exist_version" does not exist
-            _VersionCLI.handle_command()
-    assert "Version 'non_exist_version' does not exist." in caplog.text
-
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "manage-versions", "--compare-config", "1.0", "non_exist_version"]):
-            # This should raise an exception since 2.0 already exists
-            _VersionCLI.handle_command()
-    assert "Version 'non_exist_version' does not exist." in caplog.text
-
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "manage-versions", "--compare-config", "1.0", "1.0"]):
-            _VersionCLI.handle_command()
-    assert "There is no difference between version 1.0 Configuration and version 1.0 Configuration." in caplog.text
-
-    with pytest.raises(SystemExit):
-        with patch("sys.argv", ["prog", "manage-versions", "--compare-config", "1.0", "2.0"]):
-            _VersionCLI.handle_command()
-    expected_message = """Differences between version 1.0 Configuration and version 2.0 Configuration:
-\tDATA_NODE "d2" has attribute "default_path" modified: foo.csv -> bar.csv"""
-    assert expected_message in caplog.text
-
-
-def twice(a):
-    return a * 2
-
-
-def config_scenario():
-    data_node_1_config = Config.configure_data_node(
-        id="d1", storage_type="pickle", default_data="abc", scope=Scope.SCENARIO
-    )
-    data_node_2_config = Config.configure_data_node(id="d2", storage_type="csv", default_path="foo.csv")
-    task_config = Config.configure_task("my_task", twice, data_node_1_config, data_node_2_config)
-    scenario_config = Config.configure_scenario("my_scenario", [task_config], frequency=Frequency.DAILY)
-
-    return scenario_config

+ 19 - 29
tests/core/version/test_version_repositories.py

@@ -15,30 +15,26 @@ import pytest
 
 from taipy.core._version._version import _Version
 from taipy.core._version._version_fs_repository import _VersionFSRepository
-from taipy.core._version._version_sql_repository import _VersionSQLRepository
 from taipy.core.exceptions import ModelNotFound
 
 
 class TestVersionFSRepository:
-    @pytest.mark.parametrize("repo", [_VersionFSRepository, _VersionSQLRepository])
-    def test_save_and_load(self, _version, repo, init_sql_repo):
-        repository = repo()
+    def test_save_and_load(self, _version):
+        repository = _VersionFSRepository()
         repository._save(_version)
 
         obj = repository._load(_version.id)
         assert isinstance(obj, _Version)
 
-    @pytest.mark.parametrize("repo", [_VersionFSRepository, _VersionSQLRepository])
-    def test_exists(self, _version, repo, init_sql_repo):
-        repository = repo()
+    def test_exists(self, _version):
+        repository = _VersionFSRepository()
         repository._save(_version)
 
         assert repository._exists(_version.id)
         assert not repository._exists("not-existed-version")
 
-    @pytest.mark.parametrize("repo", [_VersionFSRepository, _VersionSQLRepository])
-    def test_load_all(self, _version, repo, init_sql_repo):
-        repository = repo()
+    def test_load_all(self, _version):
+        repository = _VersionFSRepository()
         for i in range(10):
             _version.id = f"_version_{i}"
             repository._save(_version)
@@ -46,9 +42,8 @@ class TestVersionFSRepository:
 
         assert len(data_nodes) == 10
 
-    @pytest.mark.parametrize("repo", [_VersionFSRepository, _VersionSQLRepository])
-    def test_load_all_with_filters(self, _version, repo, init_sql_repo):
-        repository = repo()
+    def test_load_all_with_filters(self, _version):
+        repository = _VersionFSRepository()
 
         for i in range(10):
             _version.id = f"_version_{i}"
@@ -58,9 +53,8 @@ class TestVersionFSRepository:
 
         assert len(objs) == 1
 
-    @pytest.mark.parametrize("repo", [_VersionFSRepository, _VersionSQLRepository])
-    def test_delete(self, _version, repo, init_sql_repo):
-        repository = repo()
+    def test_delete(self, _version):
+        repository = _VersionFSRepository()
         repository._save(_version)
 
         repository._delete(_version.id)
@@ -68,9 +62,8 @@ class TestVersionFSRepository:
         with pytest.raises(ModelNotFound):
             repository._load(_version.id)
 
-    @pytest.mark.parametrize("repo", [_VersionFSRepository, _VersionSQLRepository])
-    def test_delete_all(self, _version, repo, init_sql_repo):
-        repository = repo()
+    def test_delete_all(self, _version):
+        repository = _VersionFSRepository()
 
         for i in range(10):
             _version.id = f"_version_{i}"
@@ -82,9 +75,8 @@ class TestVersionFSRepository:
 
         assert len(repository._load_all()) == 0
 
-    @pytest.mark.parametrize("repo", [_VersionFSRepository, _VersionSQLRepository])
-    def test_delete_many(self, _version, repo, init_sql_repo):
-        repository = repo()
+    def test_delete_many(self, _version):
+        repository = _VersionFSRepository()
 
         for i in range(10):
             _version.id = f"_version_{i}"
@@ -97,9 +89,8 @@ class TestVersionFSRepository:
 
         assert len(repository._load_all()) == 7
 
-    @pytest.mark.parametrize("repo", [_VersionFSRepository, _VersionSQLRepository])
-    def test_search(self, _version, repo, init_sql_repo):
-        repository = repo()
+    def test_search(self, _version):
+        repository = _VersionFSRepository()
 
         for i in range(10):
             _version.id = f"_version_{i}"
@@ -112,12 +103,11 @@ class TestVersionFSRepository:
         assert len(objs) == 1
         assert isinstance(objs[0], _Version)
 
-    @pytest.mark.parametrize("repo", [_VersionFSRepository, _VersionSQLRepository])
-    def test_export(self, tmpdir, _version, repo, init_sql_repo):
-        repository = repo()
+    def test_export(self, tmpdir, _version):
+        repository = _VersionFSRepository()
         repository._save(_version)
 
         repository._export(_version.id, tmpdir.strpath)
-        dir_path = repository.dir_path if repo == _VersionFSRepository else os.path.join(tmpdir.strpath, "version")
+        dir_path = repository.dir_path
 
         assert os.path.exists(os.path.join(dir_path, f"{_version.id}.json"))

+ 80 - 23
tests/gui/data/test_array_dict_data_accessor.py

@@ -9,6 +9,7 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
+import os
 from importlib import util
 
 from taipy.gui import Gui
@@ -20,8 +21,8 @@ an_array = [1, 2, 3]
 
 
 def test_simple_data(gui: Gui, helpers):
-    accessor = _ArrayDictDataAccessor()
-    ret_data = accessor.get_data(gui, "x", an_array, {"start": 0, "end": -1}, _DataFormat.JSON)
+    accessor = _ArrayDictDataAccessor(gui)
+    ret_data = accessor.get_data("x", an_array, {"start": 0, "end": -1}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -32,8 +33,8 @@ def test_simple_data(gui: Gui, helpers):
 
 def test_simple_data_with_arrow(gui: Gui, helpers):
     if util.find_spec("pyarrow"):
-        accessor = _ArrayDictDataAccessor()
-        ret_data = accessor.get_data(gui, "x", an_array, {"start": 0, "end": -1}, _DataFormat.APACHE_ARROW)
+        accessor = _ArrayDictDataAccessor(gui)
+        ret_data = accessor.get_data("x", an_array, {"start": 0, "end": -1}, _DataFormat.APACHE_ARROW)
         assert ret_data
         value = ret_data["value"]
         assert value
@@ -43,29 +44,29 @@ def test_simple_data_with_arrow(gui: Gui, helpers):
 
 
 def test_slice(gui: Gui, helpers):
-    accessor = _ArrayDictDataAccessor()
-    value = accessor.get_data(gui, "x", an_array, {"start": 0, "end": 1}, _DataFormat.JSON)["value"]
+    accessor = _ArrayDictDataAccessor(gui)
+    value = accessor.get_data("x", an_array, {"start": 0, "end": 1}, _DataFormat.JSON)["value"]
     assert value["rowcount"] == 3
     data = value["data"]
     assert len(data) == 2
-    value = accessor.get_data(gui, "x", an_array, {"start": "0", "end": "1"}, _DataFormat.JSON)["value"]
+    value = accessor.get_data("x", an_array, {"start": "0", "end": "1"}, _DataFormat.JSON)["value"]
     data = value["data"]
     assert len(data) == 2
 
 
 def test_sort(gui: Gui, helpers):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     a_dict = {"name": ["A", "B", "C"], "value": [3, 2, 1]}
     query = {"columns": ["name", "value"], "start": 0, "end": -1, "orderby": "name", "sort": "desc"}
-    data = accessor.get_data(gui, "x", a_dict, query, _DataFormat.JSON)["value"]["data"]
+    data = accessor.get_data("x", a_dict, query, _DataFormat.JSON)["value"]["data"]
     assert data[0]["name"] == "C"
 
 
 def test_aggregate(gui: Gui, helpers, small_dataframe):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     a_dict = {"name": ["A", "B", "C", "A"], "value": [3, 2, 1, 2]}
     query = {"columns": ["name", "value"], "start": 0, "end": -1, "aggregates": ["name"], "applies": {"value": "sum"}}
-    value = accessor.get_data(gui, "x", a_dict, query, _DataFormat.JSON)["value"]
+    value = accessor.get_data("x", a_dict, query, _DataFormat.JSON)["value"]
     assert value["rowcount"] == 3
     data = value["data"]
     agregValue = next(v.get("value") for v in data if v.get("name") == "A")
@@ -73,9 +74,9 @@ def test_aggregate(gui: Gui, helpers, small_dataframe):
 
 
 def test_array_of_array(gui: Gui, helpers, small_dataframe):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     an_array = [[1, 2, 3], [2, 4, 6]]
-    ret_data = accessor.get_data(gui, "x", an_array, {"start": 0, "end": -1}, _DataFormat.JSON)
+    ret_data = accessor.get_data("x", an_array, {"start": 0, "end": -1}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -86,9 +87,9 @@ def test_array_of_array(gui: Gui, helpers, small_dataframe):
 
 
 def test_empty_array(gui: Gui, helpers, small_dataframe):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     an_array: list[str] = []
-    ret_data = accessor.get_data(gui, "x", an_array, {"start": 0, "end": -1}, _DataFormat.JSON)
+    ret_data = accessor.get_data("x", an_array, {"start": 0, "end": -1}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -98,9 +99,9 @@ def test_empty_array(gui: Gui, helpers, small_dataframe):
 
 
 def test_array_of_diff_array(gui: Gui, helpers, small_dataframe):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     an_array = [[1, 2, 3], [2, 4]]
-    ret_data = accessor.get_data(gui, "x", an_array, {"start": 0, "end": -1, "alldata": True}, _DataFormat.JSON)
+    ret_data = accessor.get_data("x", an_array, {"start": 0, "end": -1, "alldata": True}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -112,7 +113,7 @@ def test_array_of_diff_array(gui: Gui, helpers, small_dataframe):
 
 
 def test_array_of_dicts(gui: Gui, helpers, small_dataframe):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     an_array_of_dicts = [
         {
             "temperatures": [
@@ -126,9 +127,7 @@ def test_array_of_dicts(gui: Gui, helpers, small_dataframe):
         },
         {"seasons": ["Winter", "Summer", "Spring", "Autumn"]},
     ]
-    ret_data = accessor.get_data(
-        gui, "x", an_array_of_dicts, {"start": 0, "end": -1, "alldata": True}, _DataFormat.JSON
-    )
+    ret_data = accessor.get_data("x", an_array_of_dicts, {"start": 0, "end": -1, "alldata": True}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -140,7 +139,7 @@ def test_array_of_dicts(gui: Gui, helpers, small_dataframe):
 
 
 def test_array_of_Mapdicts(gui: Gui, helpers, small_dataframe):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     dict1 = _MapDict(
         {
             "temperatures": [
@@ -154,7 +153,7 @@ def test_array_of_Mapdicts(gui: Gui, helpers, small_dataframe):
         }
     )
     dict2 = _MapDict({"seasons": ["Winter", "Summer", "Spring", "Autumn"]})
-    ret_data = accessor.get_data(gui, "x", [dict1, dict2], {"start": 0, "end": -1, "alldata": True}, _DataFormat.JSON)
+    ret_data = accessor.get_data("x", [dict1, dict2], {"start": 0, "end": -1, "alldata": True}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -163,3 +162,61 @@ def test_array_of_Mapdicts(gui: Gui, helpers, small_dataframe):
     assert len(data) == 2
     assert len(data[0]["temperatures"]) == 5
     assert len(data[1]["seasons"]) == 4
+
+
+def test_edit_dict(gui, small_dataframe):
+    accessor = _ArrayDictDataAccessor(gui)
+    pd = small_dataframe
+    ln = len(pd["name"])
+    assert pd["value"][0] != 10
+    ret_data = accessor.on_edit(pd, {"index": 0, "col": "value", "value": 10})
+    assert isinstance(ret_data, dict)
+    assert len(ret_data["name"]) == ln
+    assert ret_data["value"][0] == 10
+
+
+def test_delete_dict(gui, small_dataframe):
+    accessor = _ArrayDictDataAccessor(gui)
+    pd = small_dataframe
+    ln = len(pd['name'])
+    ret_data = accessor.on_delete(pd, {"index": 0})
+    assert isinstance(ret_data, dict)
+    assert len(ret_data["name"]) == ln - 1
+
+
+def test_add_dict(gui, small_dataframe):
+    accessor = _ArrayDictDataAccessor(gui)
+    pd = small_dataframe
+    ln = len(pd["name"])
+
+    ret_data = accessor.on_add(pd, {"index": 0})
+    assert isinstance(ret_data, dict)
+    assert len(ret_data["name"]) == ln + 1
+    assert ret_data["value"][0] == 0
+    assert ret_data["name"][0] == ""
+
+    ret_data = accessor.on_add(pd, {"index": 2})
+    assert isinstance(ret_data, dict)
+    assert len(ret_data["name"]) == ln + 1
+    assert ret_data["value"][2] == 0
+    assert ret_data["name"][2] == ""
+
+    ret_data = accessor.on_add(pd, {"index": 0}, ["New", 100])
+    assert isinstance(ret_data, dict)
+    assert len(ret_data["name"]) == ln + 1
+    assert ret_data["value"][0] == 100
+    assert ret_data["name"][0] == "New"
+
+    ret_data = accessor.on_add(pd, {"index": 2}, ["New", 100])
+    assert isinstance(ret_data, dict)
+    assert len(ret_data["name"]) == ln + 1
+    assert ret_data["value"][2] == 100
+    assert ret_data["name"][2] == "New"
+
+
+def test_csv(gui, small_dataframe):
+    accessor = _ArrayDictDataAccessor(gui)
+    pd = small_dataframe
+    path = accessor.to_csv("", pd)
+    assert path is not None
+    assert os.path.getsize(path) > 0

+ 86 - 28
tests/gui/data/test_pandas_data_accessor.py

@@ -10,10 +10,11 @@
 # specific language governing permissions and limitations under the License.
 
 import inspect
+import os
 from datetime import datetime
 from importlib import util
 
-import pandas  # type: ignore
+import pandas
 from flask import g
 
 from taipy.gui import Gui
@@ -23,9 +24,9 @@ from taipy.gui.data.pandas_data_accessor import _PandasDataAccessor
 
 
 def test_simple_data(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
-    ret_data = accessor.get_data(gui, "x", pd, {"start": 0, "end": -1}, _DataFormat.JSON)
+    ret_data = accessor.get_data("x", pd, {"start": 0, "end": -1}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -36,9 +37,9 @@ def test_simple_data(gui: Gui, helpers, small_dataframe):
 
 def test_simple_data_with_arrow(gui: Gui, helpers, small_dataframe):
     if util.find_spec("pyarrow"):
-        accessor = _PandasDataAccessor()
+        accessor = _PandasDataAccessor(gui)
         pd = pandas.DataFrame(data=small_dataframe)
-        ret_data = accessor.get_data(gui, "x", pd, {"start": 0, "end": -1}, _DataFormat.APACHE_ARROW)
+        ret_data = accessor.get_data("x", pd, {"start": 0, "end": -1}, _DataFormat.APACHE_ARROW)
         assert ret_data
         value = ret_data["value"]
         assert value
@@ -48,9 +49,9 @@ def test_simple_data_with_arrow(gui: Gui, helpers, small_dataframe):
 
 
 def test_get_all_simple_data(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
-    ret_data = accessor.get_data(gui, "x", pd, {"alldata": True}, _DataFormat.JSON)
+    ret_data = accessor.get_data("x", pd, {"alldata": True}, _DataFormat.JSON)
     assert ret_data
     assert ret_data["alldata"] is True
     value = ret_data["value"]
@@ -60,40 +61,40 @@ def test_get_all_simple_data(gui: Gui, helpers, small_dataframe):
 
 
 def test_slice(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
-    value = accessor.get_data(gui, "x", pd, {"start": 0, "end": 1}, _DataFormat.JSON)["value"]
+    value = accessor.get_data("x", pd, {"start": 0, "end": 1}, _DataFormat.JSON)["value"]
     assert value["rowcount"] == 3
     data = value["data"]
     assert len(data) == 2
-    value = accessor.get_data(gui, "x", pd, {"start": "0", "end": "1"}, _DataFormat.JSON)["value"]
+    value = accessor.get_data("x", pd, {"start": "0", "end": "1"}, _DataFormat.JSON)["value"]
     data = value["data"]
     assert len(data) == 2
 
 
 def test_sort(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
     query = {"columns": ["name", "value"], "start": 0, "end": -1, "orderby": "name", "sort": "desc"}
-    data = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)["value"]["data"]
+    data = accessor.get_data("x", pd, query, _DataFormat.JSON)["value"]["data"]
     assert data[0]["name"] == "C"
 
 
 def test_aggregate(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
     pd = pandas.concat(
         [pd, pandas.DataFrame(data={"name": ["A"], "value": [4]})], axis=0, join="outer", ignore_index=True
     )
     query = {"columns": ["name", "value"], "start": 0, "end": -1, "aggregates": ["name"], "applies": {"value": "sum"}}
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)["value"]
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)["value"]
     assert value["rowcount"] == 3
     data = value["data"]
     assert next(v.get("value") for v in data if v.get("name") == "A") == 5
 
 
 def test_filters(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
     pd = pandas.concat(
         [pd, pandas.DataFrame(data={"name": ["A"], "value": [4]})], axis=0, join="outer", ignore_index=True
@@ -104,7 +105,7 @@ def test_filters(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "name", "action": "!=", "value": ""}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 4
 
     query = {
@@ -113,7 +114,7 @@ def test_filters(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "name", "action": "==", "value": ""}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 0
 
     query = {
@@ -122,7 +123,7 @@ def test_filters(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "name", "action": "==", "value": "A"}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 2
 
     query = {
@@ -131,7 +132,7 @@ def test_filters(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "name", "action": "==", "value": "A"}, {"col": "value", "action": "==", "value": 2}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 0
 
     query = {
@@ -140,13 +141,13 @@ def test_filters(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "name", "action": "!=", "value": "A"}, {"col": "value", "action": "==", "value": 2}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 1
     assert value["value"]["data"][0]["_tp_index"] == 1
 
 
 def test_filter_by_date(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
     pd["a date"] = [
         datetime.fromisocalendar(2022, 28, 1),
@@ -159,7 +160,7 @@ def test_filter_by_date(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "a date", "action": ">", "value": datetime.fromisocalendar(2022, 28, 3).isoformat() + "Z"}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 0
     query = {
         "columns": ["name", "value"],
@@ -167,7 +168,7 @@ def test_filter_by_date(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "a date", "action": ">", "value": datetime.fromisocalendar(2022, 28, 2).isoformat() + "Z"}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 1
     query = {
         "columns": ["name", "value"],
@@ -175,7 +176,7 @@ def test_filter_by_date(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "a date", "action": "<", "value": datetime.fromisocalendar(2022, 28, 3).isoformat() + "Z"}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 2
     query = {
         "columns": ["name", "value"],
@@ -186,7 +187,7 @@ def test_filter_by_date(gui: Gui, helpers, small_dataframe):
             {"col": "a date", "action": ">", "value": datetime.fromisocalendar(2022, 28, 2).isoformat() + "Z"},
         ],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 0
     query = {
         "columns": ["name", "value"],
@@ -197,14 +198,14 @@ def test_filter_by_date(gui: Gui, helpers, small_dataframe):
             {"col": "a date", "action": ">", "value": datetime.fromisocalendar(2022, 28, 1).isoformat() + "Z"},
         ],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 1
 
 
 def test_decimator(gui: Gui, helpers, small_dataframe):
     a_decimator = ScatterDecimator()  # noqa: F841
 
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
 
     # set gui frame
@@ -221,7 +222,6 @@ def test_decimator(gui: Gui, helpers, small_dataframe):
         g.client_id = cid
 
         ret_data = accessor.get_data(
-            gui,
             "x",
             pd,
             {
@@ -240,3 +240,61 @@ def test_decimator(gui: Gui, helpers, small_dataframe):
         assert value
         data = value["data"]
         assert len(data) == 2
+
+
+def test_edit(gui, small_dataframe):
+    accessor = _PandasDataAccessor(gui)
+    pd = pandas.DataFrame(small_dataframe)
+    ln = len(pd)
+    assert pd["value"].iloc[0] != 10
+    ret_data = accessor.on_edit(pd, {"index": 0, "col": "value", "value": 10})
+    assert isinstance(ret_data, pandas.DataFrame)
+    assert len(ret_data) == ln
+    assert ret_data["value"].iloc[0] == 10
+
+
+def test_delete(gui, small_dataframe):
+    accessor = _PandasDataAccessor(gui)
+    pd = pandas.DataFrame(small_dataframe)
+    ln = len(pd)
+    ret_data = accessor.on_delete(pd, {"index": 0})
+    assert isinstance(ret_data, pandas.DataFrame)
+    assert len(ret_data) == ln - 1
+
+
+def test_add(gui, small_dataframe):
+    accessor = _PandasDataAccessor(gui)
+    pd = pandas.DataFrame(small_dataframe)
+    ln = len(pd)
+
+    ret_data = accessor.on_add(pd, {"index": 0})
+    assert isinstance(ret_data, pandas.DataFrame)
+    assert len(ret_data) == ln + 1
+    assert ret_data["value"].iloc[0] == 0
+    assert ret_data["name"].iloc[0] == ""
+
+    ret_data = accessor.on_add(pd, {"index": 2})
+    assert isinstance(ret_data, pandas.DataFrame)
+    assert len(ret_data) == ln + 1
+    assert ret_data["value"].iloc[2] == 0
+    assert ret_data["name"].iloc[2] == ""
+
+    ret_data = accessor.on_add(pd, {"index": 0}, ["New", 100])
+    assert isinstance(ret_data, pandas.DataFrame)
+    assert len(ret_data) == ln + 1
+    assert ret_data["value"].iloc[0] == 100
+    assert ret_data["name"].iloc[0] == "New"
+
+    ret_data = accessor.on_add(pd, {"index": 2}, ["New", 100])
+    assert isinstance(ret_data, pandas.DataFrame)
+    assert len(ret_data) == ln + 1
+    assert ret_data["value"].iloc[2] == 100
+    assert ret_data["name"].iloc[2] == "New"
+
+
+def test_csv(gui, small_dataframe):
+    accessor = _PandasDataAccessor(gui)
+    pd = pandas.DataFrame(small_dataframe)
+    path = accessor.to_csv("", pd)
+    assert path is not None
+    assert os.path.getsize(path) > 0

+ 3 - 3
tests/templates/test_template_cli.py

@@ -18,14 +18,14 @@ from taipy._entrypoint import _entrypoint
 
 
 def test_create_cli_with_wrong_arguments(caplog):
-    with patch("sys.argv", ["prog", "create", "--teamplaet", "default"]):
+    with patch("sys.argv", ["prog", "create", "--applciation", "default"]):
         with pytest.raises(SystemExit):
             _entrypoint()
-        assert "Unknown arguments: --teamplaet. Did you mean: --template?" in caplog.text
+        assert "Unknown arguments: --applciation. Did you mean: --application?" in caplog.text
 
 
 def test_create_cli_with_unsupported_template(capsys):
-    with patch("sys.argv", ["prog", "create", "--template", "not-a-template"]):
+    with patch("sys.argv", ["prog", "create", "--application", "not-a-template"]):
         with pytest.raises(SystemExit):
             _entrypoint()
         _, err = capsys.readouterr()

+ 1 - 1
tools/gui/generate_pyi.py

@@ -59,7 +59,7 @@ with open("./taipy/gui/version.json", "r") as vfile:
         current_version = "develop"
     else:
         current_version = f'release-{version.get("major", 0)}.{version.get("minor", 0)}'
-taipy_doc_url = f"https://docs.taipy.io/en/{current_version}/manuals/userman/gui/viselements/standard-and-blocks/"
+taipy_doc_url = f"https://docs.taipy.io/en/{current_version}/manuals/userman/gui/viselements/generic/"
 
 
 # ############################################################