Browse Source

Merge branch 'develop' into fix/#1198-DataNode.is_up_to_date-raises-error-when-never-written-before

Đỗ Trường Giang 1 year ago
parent
commit
d4cf6e71ad
100 changed files with 2795 additions and 1324 deletions
  1. 1 1
      .github/actions/install/action.yml
  2. 1 0
      .github/workflows/build-and-release-single-package.yml
  3. 1 1
      .github/workflows/frontend.yml
  4. 2 2
      .github/workflows/overall-tests.yml
  5. 1 1
      .github/workflows/packaging.yml
  6. 6 6
      .github/workflows/partial-tests.yml
  7. 1 1
      .github/workflows/publish.yml
  8. 5 2
      README.md
  9. 25 0
      SECURITY.md
  10. 1 0
      contributors.txt
  11. 70 70
      frontend/taipy-gui/package-lock.json
  12. 2 1
      frontend/taipy-gui/packaging/taipy-gui.d.ts
  13. 3 1
      frontend/taipy-gui/src/components/Taipy/AutoLoadingTable.tsx
  14. 1 1
      frontend/taipy-gui/src/components/Taipy/Chart.tsx
  15. 3 1
      frontend/taipy-gui/src/components/Taipy/PaginatedTable.tsx
  16. 8 9
      frontend/taipy-gui/src/components/pages/TaipyRendered.tsx
  17. 24 9
      frontend/taipy-gui/src/context/taipyReducers.ts
  18. 1 1
      frontend/taipy-gui/src/workers/fileupload.worker.ts
  19. 55 55
      frontend/taipy/package-lock.json
  20. 48 44
      frontend/taipy/src/CoreSelector.tsx
  21. 7 1
      frontend/taipy/src/DataNodeTable.tsx
  22. 341 209
      frontend/taipy/src/DataNodeViewer.tsx
  23. 3 1
      frontend/taipy/src/NodeSelector.tsx
  24. 9 7
      frontend/taipy/src/PropertiesEditor.tsx
  25. 20 7
      frontend/taipy/src/ScenarioSelector.tsx
  26. 3 1
      frontend/taipy/src/utils.ts
  27. 7 2
      taipy/_cli/_scaffold_cli.py
  28. 6 4
      taipy/core/_entity/_properties.py
  29. 7 0
      taipy/core/_manager/_manager.py
  30. 28 0
      taipy/core/_repository/_abstract_repository.py
  31. 15 0
      taipy/core/_version/_version_manager.py
  32. 3 2
      taipy/core/_version/_version_manager_factory.py
  33. 97 84
      taipy/core/config/config.schema.json
  34. 1 2
      taipy/core/cycle/_cycle_manager_factory.py
  35. 1 1
      taipy/core/cycle/cycle.py
  36. 0 35
      taipy/core/data/_abstract_file.py
  37. 1 1
      taipy/core/data/_abstract_sql.py
  38. 29 12
      taipy/core/data/_data_manager.py
  39. 1 2
      taipy/core/data/_data_manager_factory.py
  40. 94 0
      taipy/core/data/_file_datanode_mixin.py
  41. 0 0
      taipy/core/data/_tabular_datanode_mixin.py
  42. 16 48
      taipy/core/data/csv.py
  43. 16 7
      taipy/core/data/data_node.py
  44. 16 49
      taipy/core/data/excel.py
  45. 14 45
      taipy/core/data/json.py
  46. 14 46
      taipy/core/data/parquet.py
  47. 18 56
      taipy/core/data/pickle.py
  48. 35 8
      taipy/core/exceptions/exceptions.py
  49. 3 1
      taipy/core/job/_job_manager.py
  50. 1 2
      taipy/core/job/_job_manager_factory.py
  51. 1 1
      taipy/core/job/job.py
  52. 110 3
      taipy/core/scenario/_scenario_manager.py
  53. 1 2
      taipy/core/scenario/_scenario_manager_factory.py
  54. 1 1
      taipy/core/scenario/scenario.py
  55. 1 1
      taipy/core/sequence/sequence.py
  56. 1 2
      taipy/core/submission/_submission_manager_factory.py
  57. 1 1
      taipy/core/submission/submission.py
  58. 135 46
      taipy/core/taipy.py
  59. 1 2
      taipy/core/task/_task_manager_factory.py
  60. 1 1
      taipy/core/task/task.py
  61. 15 12
      taipy/gui/_renderers/builder.py
  62. 11 7
      taipy/gui/_renderers/factory.py
  63. 3 1
      taipy/gui/_renderers/json.py
  64. 24 2
      taipy/gui/extension/library.py
  65. 27 16
      taipy/gui/gui.py
  66. 1 0
      taipy/gui/utils/__init__.py
  67. 5 0
      taipy/gui/utils/types.py
  68. 52 44
      taipy/gui_core/_GuiCoreLib.py
  69. 48 6
      taipy/gui_core/_adapters.py
  70. 48 96
      taipy/gui_core/_context.py
  71. 60 8
      taipy/gui_core/viselements.json
  72. 2 3
      tests/core/_orchestrator/_dispatcher/test_dispatcher__needs_to_run.py
  73. 7 4
      tests/core/_orchestrator/test_orchestrator__submit.py
  74. 4 3
      tests/core/_orchestrator/test_orchestrator__submit_task.py
  75. 220 0
      tests/core/config/test_config_schema.py
  76. 21 15
      tests/core/conftest.py
  77. 17 0
      tests/core/cycle/test_cycle.py
  78. 4 2
      tests/core/data/test_csv_data_node.py
  79. 87 49
      tests/core/data/test_data_manager.py
  80. 16 0
      tests/core/data/test_data_node.py
  81. 5 3
      tests/core/data/test_excel_data_node.py
  82. 5 3
      tests/core/data/test_json_data_node.py
  83. 4 2
      tests/core/data/test_parquet_data_node.py
  84. 2 2
      tests/core/data/test_pickle_data_node.py
  85. 18 0
      tests/core/job/test_job.py
  86. 15 0
      tests/core/scenario/test_scenario.py
  87. 72 0
      tests/core/scenario/test_scenario_manager.py
  88. 20 0
      tests/core/sequence/test_sequence.py
  89. 15 0
      tests/core/submission/test_submission.py
  90. 16 0
      tests/core/task/test_task.py
  91. 47 0
      tests/core/test_taipy.py
  92. 80 52
      tests/core/test_taipy/test_export.py
  93. 63 56
      tests/core/test_taipy/test_export_with_sql_repo.py
  94. 213 0
      tests/core/test_taipy/test_import.py
  95. 174 0
      tests/core/test_taipy/test_import_with_sql_repo.py
  96. 1 1
      tests/gui/extension/test_library.py
  97. 6 6
      tests/gui_core/test_context_is_readable.py
  98. 17 0
      tests/templates/test_template_cli.py
  99. 16 16
      tools/packages/pipfiles/Pipfile3.10.max
  100. 16 16
      tools/packages/pipfiles/Pipfile3.11.max

+ 1 - 1
.github/actions/install/action.yml

@@ -45,7 +45,7 @@ runs:
       shell: bash
 
     - name: Setup LibMagic (MacOS)
-      if: inputs.os == 'macos-latest' && inputs.install-gui
+      if: inputs.os == 'macos-13' && inputs.install-gui
       run: brew install libmagic
       shell: bash
 

+ 1 - 0
.github/workflows/build-and-release-single-package.yml

@@ -152,6 +152,7 @@ jobs:
         working-directory: ${{ steps.set-variables.outputs.package_dir }}
         run: |
           python setup.py build_py && python -m build
+          for file in ./dist/*; do mv "$file" "${file//_/-}"; done
 
       - name: Create tag and release
         working-directory: ${{ steps.set-variables.outputs.package_dir }}

+ 1 - 1
.github/workflows/frontend.yml

@@ -15,7 +15,7 @@ jobs:
     strategy:
       matrix:
         node-version: [20.x]
-        os: [ubuntu-latest, windows-latest, macos-latest]
+        os: [ubuntu-latest, windows-latest, macos-13]
     runs-on: ${{ matrix.os }}
 
     defaults:

+ 2 - 2
.github/workflows/overall-tests.yml

@@ -47,7 +47,7 @@ jobs:
       fail-fast: false
       matrix:
         python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
-        os: [ubuntu-latest, windows-latest, macos-latest]
+        os: [ubuntu-latest, windows-latest, macos-13]
         pipfile-version: ['min', 'max']
     runs-on: ${{ matrix.os }}
     steps:
@@ -75,7 +75,7 @@ jobs:
       fail-fast: false
       matrix:
         python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
-        os: [ubuntu-latest, windows-latest, macos-latest]
+        os: [ubuntu-latest, windows-latest, macos-13]
         orchestrator: ['orchestrator_dispatcher', 'standalone']
         pipfile-version: ['min', 'max']
     runs-on: ${{ matrix.os }}

+ 1 - 1
.github/workflows/packaging.yml

@@ -18,7 +18,7 @@ jobs:
     strategy:
       matrix:
         python-versions: [ '3.8', '3.9', '3.10', '3.11', '3.12']
-        os: [ubuntu-latest, macos-latest] #, windows-latest]
+        os: [ubuntu-latest, macos-13] #, windows-latest]
 
     runs-on: ${{ matrix.os }}
 

+ 6 - 6
.github/workflows/partial-tests.yml

@@ -21,7 +21,7 @@ jobs:
       fail-fast: false
       matrix:
         python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
-        os: [ubuntu-latest, windows-latest, macos-latest]
+        os: [ubuntu-latest, windows-latest, macos-13]
     runs-on: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v4
@@ -61,7 +61,7 @@ jobs:
         run: pipenv install --dev --python=${{ matrix.python-version }}
 
       - name: Setup LibMagic (MacOS)
-        if: matrix.os == 'macos-latest'
+        if: matrix.os == 'macos-13'
         run: brew install libmagic
 
       - uses: actions/setup-node@v4
@@ -116,7 +116,7 @@ jobs:
       fail-fast: false
       matrix:
         python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
-        os: [ubuntu-latest, windows-latest, macos-latest]
+        os: [ubuntu-latest, windows-latest, macos-13]
     runs-on: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v4
@@ -141,7 +141,7 @@ jobs:
         run: pipenv install --dev --python=${{ matrix.python-version }}
 
       - name: Setup LibMagic (MacOS)
-        if: matrix.os == 'macos-latest' && steps.changes.outputs.core == 'true'
+        if: matrix.os == 'macos-13' && steps.changes.outputs.core == 'true'
         run: brew install libmagic
 
       - name: Pytest Core orchestrator_dispatcher
@@ -155,7 +155,7 @@ jobs:
       fail-fast: false
       matrix:
         python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
-        os: [ubuntu-latest, windows-latest, macos-latest]
+        os: [ubuntu-latest, windows-latest, macos-13]
     runs-on: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v4
@@ -180,7 +180,7 @@ jobs:
         run: pipenv install --dev --python=${{ matrix.python-version }}
 
       - name: Setup LibMagic (MacOS)
-        if: matrix.os == 'macos-latest' && steps.changes.outputs.core == 'true'
+        if: matrix.os == 'macos-13' && steps.changes.outputs.core == 'true'
         run: brew install libmagic
 
       - name: Pytest Core standalone

+ 1 - 1
.github/workflows/publish.yml

@@ -105,7 +105,7 @@ jobs:
     strategy:
       matrix:
         python-versions: ['3.8','3.9','3.10', '3.11', '3.12']
-        os: [ubuntu-latest,windows-latest,macos-latest]
+        os: [ubuntu-latest,windows-latest,macos-13]
     runs-on: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v3

+ 5 - 2
README.md

@@ -1,8 +1,11 @@
+[![Taipy Designer](https://github.com/nevo-david/taipy/assets/100117126/e787ba7b-ec7a-4d3f-a7e4-0f195daadce7)
+](https://taipy.io/enterprise)
+
 <div align="center">
   <a href="https://taipy.io?utm_source=github" target="_blank">
   <picture>
-    <source media="(prefers-color-scheme: dark)" srcset="https://github.com/Avaiga/taipy/assets/100117126/f59f70e9-1905-4abc-8760-8631b57c14c2">
-    <img alt="Taipy" src="readme_img/readme_logo.png" width="200" />
+    <source media="(prefers-color-scheme: dark)" srcset="https://github.com/Avaiga/taipy/assets/100117126/509bf101-54c2-4321-adaf-a2af63af9682">
+    <img alt="Taipy" src="https://github.com/Avaiga/taipy/assets/100117126/4df8a733-d8d0-4893-acf0-d24ef9e8b58a" width="400" />
   </picture>
   </a>
 </div>

+ 25 - 0
SECURITY.md

@@ -0,0 +1,25 @@
+# Security Policy
+
+## Supported Versions
+
+Taipy follows a security support policy to address security vulnerabilities promptly.
+The following table summarizes the supported and maintained versions of Taipy that receive security updates:
+
+| Version | Supported          |
+| ------- | ------------------ |
+| < 2.0   | :x:                |
+| >= 2.0  | :white_check_mark: |
+
+
+## Reporting a Vulnerability
+
+If you find a security vulnerability in Taipy, please report it immediately. We take security 
+vulnerabilities seriously, and we appreciate your responsible disclosure.
+
+To report a security vulnerability, please use the GitHub Security Advisories feature by visiting 
+the following [security URL](https://github.com/Avaiga/taipy/security).
+
+Please provide detailed information about the vulnerability, including steps to reproduce, affected 
+versions, and potential impact. We will review and respond to security vulnerability reports as soon as possible.
+
+Thank you for helping us keep Taipy secure.

+ 1 - 0
contributors.txt

@@ -14,3 +14,4 @@ enarroied
 bobbyshermi
 Forchapeatl
 yarikoptic
+Luke-0162

+ 70 - 70
frontend/taipy-gui/package-lock.json

@@ -2077,9 +2077,9 @@
       }
     },
     "node_modules/@remix-run/router": {
-      "version": "1.15.3",
-      "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.15.3.tgz",
-      "integrity": "sha512-Oy8rmScVrVxWZVOpEF57ovlnhpZ8CCPlnIIumVcV9nFdiSIrus99+Lw78ekXyGvVDlIsFJbSfmSovJUhCWYV3w==",
+      "version": "1.16.0",
+      "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.16.0.tgz",
+      "integrity": "sha512-Quz1KOffeEf/zwkCBM3kBtH4ZoZ+pT3xIXBG4PPW/XFtDP7EGhtTiC2+gpL9GnR7+Qdet5Oa6cYSvwKYg6kN9Q==",
       "engines": {
         "node": ">=14.0.0"
       }
@@ -2733,16 +2733,16 @@
       "dev": true
     },
     "node_modules/@typescript-eslint/eslint-plugin": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.7.0.tgz",
-      "integrity": "sha512-GJWR0YnfrKnsRoluVO3PRb9r5aMZriiMMM/RHj5nnTrBy1/wIgk76XCtCKcnXGjpZQJQRFtGV9/0JJ6n30uwpQ==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.7.1.tgz",
+      "integrity": "sha512-KwfdWXJBOviaBVhxO3p5TJiLpNuh2iyXyjmWN0f1nU87pwyvfS0EmjC6ukQVYVFJd/K1+0NWGPDXiyEyQorn0Q==",
       "dev": true,
       "dependencies": {
         "@eslint-community/regexpp": "^4.10.0",
-        "@typescript-eslint/scope-manager": "7.7.0",
-        "@typescript-eslint/type-utils": "7.7.0",
-        "@typescript-eslint/utils": "7.7.0",
-        "@typescript-eslint/visitor-keys": "7.7.0",
+        "@typescript-eslint/scope-manager": "7.7.1",
+        "@typescript-eslint/type-utils": "7.7.1",
+        "@typescript-eslint/utils": "7.7.1",
+        "@typescript-eslint/visitor-keys": "7.7.1",
         "debug": "^4.3.4",
         "graphemer": "^1.4.0",
         "ignore": "^5.3.1",
@@ -2768,15 +2768,15 @@
       }
     },
     "node_modules/@typescript-eslint/parser": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.7.0.tgz",
-      "integrity": "sha512-fNcDm3wSwVM8QYL4HKVBggdIPAy9Q41vcvC/GtDobw3c4ndVT3K6cqudUmjHPw8EAp4ufax0o58/xvWaP2FmTg==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.7.1.tgz",
+      "integrity": "sha512-vmPzBOOtz48F6JAGVS/kZYk4EkXao6iGrD838sp1w3NQQC0W8ry/q641KU4PrG7AKNAf56NOcR8GOpH8l9FPCw==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/scope-manager": "7.7.0",
-        "@typescript-eslint/types": "7.7.0",
-        "@typescript-eslint/typescript-estree": "7.7.0",
-        "@typescript-eslint/visitor-keys": "7.7.0",
+        "@typescript-eslint/scope-manager": "7.7.1",
+        "@typescript-eslint/types": "7.7.1",
+        "@typescript-eslint/typescript-estree": "7.7.1",
+        "@typescript-eslint/visitor-keys": "7.7.1",
         "debug": "^4.3.4"
       },
       "engines": {
@@ -2796,13 +2796,13 @@
       }
     },
     "node_modules/@typescript-eslint/scope-manager": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.0.tgz",
-      "integrity": "sha512-/8INDn0YLInbe9Wt7dK4cXLDYp0fNHP5xKLHvZl3mOT5X17rK/YShXaiNmorl+/U4VKCVIjJnx4Ri5b0y+HClw==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.1.tgz",
+      "integrity": "sha512-PytBif2SF+9SpEUKynYn5g1RHFddJUcyynGpztX3l/ik7KmZEv19WCMhUBkHXPU9es/VWGD3/zg3wg90+Dh2rA==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.7.0",
-        "@typescript-eslint/visitor-keys": "7.7.0"
+        "@typescript-eslint/types": "7.7.1",
+        "@typescript-eslint/visitor-keys": "7.7.1"
       },
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -2813,13 +2813,13 @@
       }
     },
     "node_modules/@typescript-eslint/type-utils": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.7.0.tgz",
-      "integrity": "sha512-bOp3ejoRYrhAlnT/bozNQi3nio9tIgv3U5C0mVDdZC7cpcQEDZXvq8inrHYghLVwuNABRqrMW5tzAv88Vy77Sg==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.7.1.tgz",
+      "integrity": "sha512-ZksJLW3WF7o75zaBPScdW1Gbkwhd/lyeXGf1kQCxJaOeITscoSl0MjynVvCzuV5boUz/3fOI06Lz8La55mu29Q==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/typescript-estree": "7.7.0",
-        "@typescript-eslint/utils": "7.7.0",
+        "@typescript-eslint/typescript-estree": "7.7.1",
+        "@typescript-eslint/utils": "7.7.1",
         "debug": "^4.3.4",
         "ts-api-utils": "^1.3.0"
       },
@@ -2840,9 +2840,9 @@
       }
     },
     "node_modules/@typescript-eslint/types": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.0.tgz",
-      "integrity": "sha512-G01YPZ1Bd2hn+KPpIbrAhEWOn5lQBrjxkzHkWvP6NucMXFtfXoevK82hzQdpfuQYuhkvFDeQYbzXCjR1z9Z03w==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.1.tgz",
+      "integrity": "sha512-AmPmnGW1ZLTpWa+/2omPrPfR7BcbUU4oha5VIbSbS1a1Tv966bklvLNXxp3mrbc+P2j4MNOTfDffNsk4o0c6/w==",
       "dev": true,
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -2853,13 +2853,13 @@
       }
     },
     "node_modules/@typescript-eslint/typescript-estree": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.0.tgz",
-      "integrity": "sha512-8p71HQPE6CbxIBy2kWHqM1KGrC07pk6RJn40n0DSc6bMOBBREZxSDJ+BmRzc8B5OdaMh1ty3mkuWRg4sCFiDQQ==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.1.tgz",
+      "integrity": "sha512-CXe0JHCXru8Fa36dteXqmH2YxngKJjkQLjxzoj6LYwzZ7qZvgsLSc+eqItCrqIop8Vl2UKoAi0StVWu97FQZIQ==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.7.0",
-        "@typescript-eslint/visitor-keys": "7.7.0",
+        "@typescript-eslint/types": "7.7.1",
+        "@typescript-eslint/visitor-keys": "7.7.1",
         "debug": "^4.3.4",
         "globby": "^11.1.0",
         "is-glob": "^4.0.3",
@@ -2881,17 +2881,17 @@
       }
     },
     "node_modules/@typescript-eslint/utils": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.7.0.tgz",
-      "integrity": "sha512-LKGAXMPQs8U/zMRFXDZOzmMKgFv3COlxUQ+2NMPhbqgVm6R1w+nU1i4836Pmxu9jZAuIeyySNrN/6Rc657ggig==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.7.1.tgz",
+      "integrity": "sha512-QUvBxPEaBXf41ZBbaidKICgVL8Hin0p6prQDu6bbetWo39BKbWJxRsErOzMNT1rXvTll+J7ChrbmMCXM9rsvOQ==",
       "dev": true,
       "dependencies": {
         "@eslint-community/eslint-utils": "^4.4.0",
         "@types/json-schema": "^7.0.15",
         "@types/semver": "^7.5.8",
-        "@typescript-eslint/scope-manager": "7.7.0",
-        "@typescript-eslint/types": "7.7.0",
-        "@typescript-eslint/typescript-estree": "7.7.0",
+        "@typescript-eslint/scope-manager": "7.7.1",
+        "@typescript-eslint/types": "7.7.1",
+        "@typescript-eslint/typescript-estree": "7.7.1",
         "semver": "^7.6.0"
       },
       "engines": {
@@ -2906,12 +2906,12 @@
       }
     },
     "node_modules/@typescript-eslint/visitor-keys": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.0.tgz",
-      "integrity": "sha512-h0WHOj8MhdhY8YWkzIF30R379y0NqyOHExI9N9KCzvmu05EgG4FumeYa3ccfKUSphyWkWQE1ybVrgz/Pbam6YA==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.1.tgz",
+      "integrity": "sha512-gBL3Eq25uADw1LQ9kVpf3hRM+DWzs0uZknHYK3hq4jcTPqVCClHGDnB6UUUV2SFeBeA4KWHWbbLqmbGcZ4FYbw==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.7.0",
+        "@typescript-eslint/types": "7.7.1",
         "eslint-visitor-keys": "^3.4.3"
       },
       "engines": {
@@ -4224,9 +4224,9 @@
       }
     },
     "node_modules/clsx": {
-      "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz",
-      "integrity": "sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==",
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz",
+      "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==",
       "engines": {
         "node": ">=6"
       }
@@ -5392,9 +5392,9 @@
       "integrity": "sha512-/pjZsA1b4RPHbeWZQn66SWS8nZZWLQQ23oE3Eam7aroEFGEvwKAsJfZ9ytiEMycfzXWpca4FA9QIOehf7PocBQ=="
     },
     "node_modules/electron-to-chromium": {
-      "version": "1.4.745",
-      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.745.tgz",
-      "integrity": "sha512-tRbzkaRI5gbUn5DEvF0dV4TQbMZ5CLkWeTAXmpC9IrYT+GE+x76i9p+o3RJ5l9XmdQlI1pPhVtE9uNcJJ0G0EA=="
+      "version": "1.4.747",
+      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.747.tgz",
+      "integrity": "sha512-+FnSWZIAvFHbsNVmUxhEqWiaOiPMcfum1GQzlWCg/wLigVtshOsjXHyEFfmt6cFK6+HkS3QOJBv6/3OPumbBfw=="
     },
     "node_modules/element-size": {
       "version": "1.1.1",
@@ -6668,9 +6668,9 @@
       "integrity": "sha512-wcCp8vu8FT22BnvKVPjXa/ICBWRq/zjFfdofZy1WSpQZpphblv12/bOQLBC1rMM7SGOFS9ltVmKOHil5+Ml7gA=="
     },
     "node_modules/gl-text": {
-      "version": "1.3.1",
-      "resolved": "https://registry.npmjs.org/gl-text/-/gl-text-1.3.1.tgz",
-      "integrity": "sha512-/f5gcEMiZd+UTBJLTl3D+CkCB/0UFGTx3nflH8ZmyWcLkZhsZ1+Xx5YYkw2rgWAzgPeE35xCqBuHSoMKQVsR+w==",
+      "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/gl-text/-/gl-text-1.4.0.tgz",
+      "integrity": "sha512-o47+XBqLCj1efmuNyCHt7/UEJmB9l66ql7pnobD6p+sgmBUdzfMZXIF0zD2+KRfpd99DJN+QXdvTFAGCKCVSmQ==",
       "dependencies": {
         "bit-twiddle": "^1.0.2",
         "color-normalize": "^1.5.0",
@@ -9912,9 +9912,9 @@
       }
     },
     "node_modules/micromark-core-commonmark": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.0.tgz",
-      "integrity": "sha512-jThOz/pVmAYUtkroV3D5c1osFXAMv9e0ypGDOIZuCeAe91/sD6BoE2Sjzt30yuXtwOYUmySOhMas/PVyh02itA==",
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.1.tgz",
+      "integrity": "sha512-CUQyKr1e///ZODyD1U3xit6zXwy1a8q2a1S1HKtIlmgvurrEpaw/Y9y6KSIbF8P59cn/NjzHyO+Q2fAyYLQrAA==",
       "funding": [
         {
           "type": "GitHub Sponsors",
@@ -11102,9 +11102,9 @@
       }
     },
     "node_modules/plotly.js": {
-      "version": "2.31.1",
-      "resolved": "https://registry.npmjs.org/plotly.js/-/plotly.js-2.31.1.tgz",
-      "integrity": "sha512-KEdSZHDyd7bIj8IY/7JTenRHof5MwbhT6CLsjhbzVhy/XDw35Pju+2M7J6nbPcmnnRzmSTjoBch1gNzcJc50vA==",
+      "version": "2.32.0",
+      "resolved": "https://registry.npmjs.org/plotly.js/-/plotly.js-2.32.0.tgz",
+      "integrity": "sha512-QBYyfVFs1XdoXQBq/f7SoiqQD/BEyDA5WwvN1NwY4ZTrTX6GmJ5jE5ydlt1I4K8i5W6H1atgti31jcSYD6StKA==",
       "dependencies": {
         "@plotly/d3": "3.8.1",
         "@plotly/d3-sankey": "0.7.2",
@@ -11130,7 +11130,7 @@
         "d3-time-format": "^2.2.3",
         "fast-isnumeric": "^1.1.4",
         "gl-mat4": "^1.2.0",
-        "gl-text": "^1.3.1",
+        "gl-text": "^1.4.0",
         "has-hover": "^1.0.1",
         "has-passive-events": "^1.0.0",
         "is-mobile": "^4.0.0",
@@ -11626,11 +11626,11 @@
       }
     },
     "node_modules/react-router": {
-      "version": "6.22.3",
-      "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.22.3.tgz",
-      "integrity": "sha512-dr2eb3Mj5zK2YISHK++foM9w4eBnO23eKnZEDs7c880P6oKbrjz/Svg9+nxqtHQK+oMW4OtjZca0RqPglXxguQ==",
+      "version": "6.23.0",
+      "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.23.0.tgz",
+      "integrity": "sha512-wPMZ8S2TuPadH0sF5irFGjkNLIcRvOSaEe7v+JER8508dyJumm6XZB1u5kztlX0RVq6AzRVndzqcUh6sFIauzA==",
       "dependencies": {
-        "@remix-run/router": "1.15.3"
+        "@remix-run/router": "1.16.0"
       },
       "engines": {
         "node": ">=14.0.0"
@@ -11640,12 +11640,12 @@
       }
     },
     "node_modules/react-router-dom": {
-      "version": "6.22.3",
-      "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.22.3.tgz",
-      "integrity": "sha512-7ZILI7HjcE+p31oQvwbokjk6OA/bnFxrhJ19n82Ex9Ph8fNAq+Hm/7KchpMGlTgWhUxRHMMCut+vEtNpWpowKw==",
+      "version": "6.23.0",
+      "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.23.0.tgz",
+      "integrity": "sha512-Q9YaSYvubwgbal2c9DJKfx6hTNoBp3iJDsl+Duva/DwxoJH+OTXkxGpql4iUK2sla/8z4RpjAm6EWx1qUDuopQ==",
       "dependencies": {
-        "@remix-run/router": "1.15.3",
-        "react-router": "6.22.3"
+        "@remix-run/router": "1.16.0",
+        "react-router": "6.23.0"
       },
       "engines": {
         "node": ">=14.0.0"

+ 2 - 1
frontend/taipy-gui/packaging/taipy-gui.d.ts

@@ -272,7 +272,8 @@ export declare const createRequestUpdateAction: (
     id: string | undefined,
     context: string | undefined,
     names: string[],
-    forceRefresh?: boolean
+    forceRefresh?: boolean,
+    stateContext?: Record<string, unknown>
 ) => Action;
 /**
  * A column description as received by the backend.

+ 3 - 1
frontend/taipy-gui/src/components/Taipy/AutoLoadingTable.tsx

@@ -406,7 +406,8 @@ const AutoLoadingTable = (props: TaipyTableProps) => {
                         handleNan,
                         afs,
                         compare ? onCompare : undefined,
-                        updateVars && getUpdateVar(updateVars, "comparedatas")
+                        updateVars && getUpdateVar(updateVars, "comparedatas"),
+                        typeof userData == "object" ? (userData as Record<string, Record<string, unknown>>).context : undefined
                     )
                 );
             });
@@ -428,6 +429,7 @@ const AutoLoadingTable = (props: TaipyTableProps) => {
             onCompare,
             dispatch,
             module,
+            userData
         ]
     );
 

+ 1 - 1
frontend/taipy-gui/src/components/Taipy/Chart.tsx

@@ -233,7 +233,7 @@ const TaipyPlotlyButtons: ModeBarButtonAny[] = [
             if (height) {
                 gd.attributeStyleMap.set("height", height);
             } else {
-                gd.setAttribute("data-height", getComputedStyle(gd.querySelector(".svg-container") || gd).height)
+                gd.setAttribute("data-height", getComputedStyle(gd).height)
             }
             window.dispatchEvent(new Event('resize'));
         },

+ 3 - 1
frontend/taipy-gui/src/components/Taipy/PaginatedTable.tsx

@@ -258,7 +258,8 @@ const PaginatedTable = (props: TaipyPaginatedTableProps) => {
                     handleNan,
                     afs,
                     compare ? onCompare : undefined,
-                    updateVars && getUpdateVar(updateVars, "comparedatas")
+                    updateVars && getUpdateVar(updateVars, "comparedatas"),
+                    typeof userData == "object" ? (userData as Record<string, Record<string, unknown>>).context : undefined
                 )
             );
         } else {
@@ -285,6 +286,7 @@ const PaginatedTable = (props: TaipyPaginatedTableProps) => {
         module,
         compare,
         onCompare,
+        userData
     ]);
 
     const onSort = useCallback(

+ 8 - 9
frontend/taipy-gui/src/components/pages/TaipyRendered.tsx

@@ -99,16 +99,15 @@ const TaipyRendered = (props: TaipyRenderedProps) => {
                         Array.isArray(result.data.head) && setHead(result.data.head);
                     }
                 })
-                .catch((error) =>
+                .catch((error) => {
+                    const res =
+                        error.response?.data && /<p\sclass=\"errormsg\">([\s\S]*?)<\/p>/gm.exec(error.response?.data);
                     setPageState({
-                        jsx: `<h1>${
-                            error.response?.data ||
-                            `No data fetched from backend from ${
-                                path === "/TaiPy_root_page" ? baseURL : baseURL + path
-                            }`
-                        }</h1><br></br>${error}`,
-                    })
-                );
+                        jsx: `<h1>${res ? res[0] : "Unknown Error"}</h1><h2>No data fetched from backend from ${
+                            path === "/TaiPy_root_page" ? baseURL : baseURL + path
+                        }</h2><br></br>${res[0] ? "" : error}`,
+                    });
+                });
         }
         // eslint-disable-next-line react-hooks/exhaustive-deps
     }, [path, state.id, dispatch, partial, fromBlock, baseURL]);

+ 24 - 9
frontend/taipy-gui/src/context/taipyReducers.ts

@@ -576,6 +576,15 @@ export const createRequestChartUpdateAction = (
         true
     );
 
+const ligtenPayload = (payload: Record<string, unknown>) => {
+    return Object.keys(payload || {}).reduce((pv, key) => {
+        if (payload[key] !== undefined) {
+            pv[key] = payload[key];
+        }
+        return pv;
+    }, {} as typeof payload)
+}
+
 export const createRequestTableUpdateAction = (
     name: string | undefined,
     id: string | undefined,
@@ -593,9 +602,10 @@ export const createRequestTableUpdateAction = (
     handleNan?: boolean,
     filters?: Array<FilterDesc>,
     compare?: string,
-    compareDatas?: string
+    compareDatas?: string,
+    stateContext?: Record<string, unknown>
 ): TaipyAction =>
-    createRequestDataUpdateAction(name, id, context, columns, pageKey, {
+    createRequestDataUpdateAction(name, id, context, columns, pageKey, ligtenPayload({
         start: start,
         end: end,
         orderby: orderBy,
@@ -608,7 +618,8 @@ export const createRequestTableUpdateAction = (
         filters: filters,
         compare: compare,
         compare_datas: compareDatas,
-    });
+        state_context: stateContext,
+    }));
 
 export const createRequestInfiniteTableUpdateAction = (
     name: string | undefined,
@@ -627,9 +638,10 @@ export const createRequestInfiniteTableUpdateAction = (
     handleNan?: boolean,
     filters?: Array<FilterDesc>,
     compare?: string,
-    compareDatas?: string
+    compareDatas?: string,
+    stateContext?: Record<string, unknown>
 ): TaipyAction =>
-    createRequestDataUpdateAction(name, id, context, columns, pageKey, {
+    createRequestDataUpdateAction(name, id, context, columns, pageKey, ligtenPayload({
         infinite: true,
         start: start,
         end: end,
@@ -643,7 +655,8 @@ export const createRequestInfiniteTableUpdateAction = (
         filters: filters,
         compare: compare,
         compare_datas: compareDatas,
-    });
+        state_context: stateContext,
+    }));
 
 /**
  * Create a *request data update* `Action` that will be used to update the `Context`.
@@ -710,16 +723,18 @@ export const createRequestUpdateAction = (
     id: string | undefined,
     context: string | undefined,
     names: string[],
-    forceRefresh = false
+    forceRefresh = false,
+    stateContext?: Record<string, unknown>
 ): TaipyAction => ({
     type: Types.RequestUpdate,
     name: "",
     context: context,
-    payload: {
+    payload: ligtenPayload({
         id: id,
         names: names,
         refresh: forceRefresh,
-    },
+        state_context: stateContext,
+    }),
 });
 
 export const createSetLocationsAction = (locations: Record<string, string>): TaipyAction => ({

+ 1 - 1
frontend/taipy-gui/src/workers/fileupload.worker.ts

@@ -82,8 +82,8 @@ const process = (files: FileList, uploadUrl: string, varName: string, id: string
 
                 start = end;
                 end = start + BYTES_PER_CHUNK;
-                uploadedFiles.push(blob.name);
             }
+            uploadedFiles.push(blob.name);
         }
         self.postMessage({
             progress: 100,

+ 55 - 55
frontend/taipy/package-lock.json

@@ -1206,16 +1206,16 @@
       "dev": true
     },
     "node_modules/@typescript-eslint/eslint-plugin": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.7.0.tgz",
-      "integrity": "sha512-GJWR0YnfrKnsRoluVO3PRb9r5aMZriiMMM/RHj5nnTrBy1/wIgk76XCtCKcnXGjpZQJQRFtGV9/0JJ6n30uwpQ==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.7.1.tgz",
+      "integrity": "sha512-KwfdWXJBOviaBVhxO3p5TJiLpNuh2iyXyjmWN0f1nU87pwyvfS0EmjC6ukQVYVFJd/K1+0NWGPDXiyEyQorn0Q==",
       "dev": true,
       "dependencies": {
         "@eslint-community/regexpp": "^4.10.0",
-        "@typescript-eslint/scope-manager": "7.7.0",
-        "@typescript-eslint/type-utils": "7.7.0",
-        "@typescript-eslint/utils": "7.7.0",
-        "@typescript-eslint/visitor-keys": "7.7.0",
+        "@typescript-eslint/scope-manager": "7.7.1",
+        "@typescript-eslint/type-utils": "7.7.1",
+        "@typescript-eslint/utils": "7.7.1",
+        "@typescript-eslint/visitor-keys": "7.7.1",
         "debug": "^4.3.4",
         "graphemer": "^1.4.0",
         "ignore": "^5.3.1",
@@ -1241,15 +1241,15 @@
       }
     },
     "node_modules/@typescript-eslint/parser": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.7.0.tgz",
-      "integrity": "sha512-fNcDm3wSwVM8QYL4HKVBggdIPAy9Q41vcvC/GtDobw3c4ndVT3K6cqudUmjHPw8EAp4ufax0o58/xvWaP2FmTg==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.7.1.tgz",
+      "integrity": "sha512-vmPzBOOtz48F6JAGVS/kZYk4EkXao6iGrD838sp1w3NQQC0W8ry/q641KU4PrG7AKNAf56NOcR8GOpH8l9FPCw==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/scope-manager": "7.7.0",
-        "@typescript-eslint/types": "7.7.0",
-        "@typescript-eslint/typescript-estree": "7.7.0",
-        "@typescript-eslint/visitor-keys": "7.7.0",
+        "@typescript-eslint/scope-manager": "7.7.1",
+        "@typescript-eslint/types": "7.7.1",
+        "@typescript-eslint/typescript-estree": "7.7.1",
+        "@typescript-eslint/visitor-keys": "7.7.1",
         "debug": "^4.3.4"
       },
       "engines": {
@@ -1269,13 +1269,13 @@
       }
     },
     "node_modules/@typescript-eslint/scope-manager": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.0.tgz",
-      "integrity": "sha512-/8INDn0YLInbe9Wt7dK4cXLDYp0fNHP5xKLHvZl3mOT5X17rK/YShXaiNmorl+/U4VKCVIjJnx4Ri5b0y+HClw==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.1.tgz",
+      "integrity": "sha512-PytBif2SF+9SpEUKynYn5g1RHFddJUcyynGpztX3l/ik7KmZEv19WCMhUBkHXPU9es/VWGD3/zg3wg90+Dh2rA==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.7.0",
-        "@typescript-eslint/visitor-keys": "7.7.0"
+        "@typescript-eslint/types": "7.7.1",
+        "@typescript-eslint/visitor-keys": "7.7.1"
       },
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -1286,13 +1286,13 @@
       }
     },
     "node_modules/@typescript-eslint/type-utils": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.7.0.tgz",
-      "integrity": "sha512-bOp3ejoRYrhAlnT/bozNQi3nio9tIgv3U5C0mVDdZC7cpcQEDZXvq8inrHYghLVwuNABRqrMW5tzAv88Vy77Sg==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.7.1.tgz",
+      "integrity": "sha512-ZksJLW3WF7o75zaBPScdW1Gbkwhd/lyeXGf1kQCxJaOeITscoSl0MjynVvCzuV5boUz/3fOI06Lz8La55mu29Q==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/typescript-estree": "7.7.0",
-        "@typescript-eslint/utils": "7.7.0",
+        "@typescript-eslint/typescript-estree": "7.7.1",
+        "@typescript-eslint/utils": "7.7.1",
         "debug": "^4.3.4",
         "ts-api-utils": "^1.3.0"
       },
@@ -1313,9 +1313,9 @@
       }
     },
     "node_modules/@typescript-eslint/types": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.0.tgz",
-      "integrity": "sha512-G01YPZ1Bd2hn+KPpIbrAhEWOn5lQBrjxkzHkWvP6NucMXFtfXoevK82hzQdpfuQYuhkvFDeQYbzXCjR1z9Z03w==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.1.tgz",
+      "integrity": "sha512-AmPmnGW1ZLTpWa+/2omPrPfR7BcbUU4oha5VIbSbS1a1Tv966bklvLNXxp3mrbc+P2j4MNOTfDffNsk4o0c6/w==",
       "dev": true,
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -1326,13 +1326,13 @@
       }
     },
     "node_modules/@typescript-eslint/typescript-estree": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.0.tgz",
-      "integrity": "sha512-8p71HQPE6CbxIBy2kWHqM1KGrC07pk6RJn40n0DSc6bMOBBREZxSDJ+BmRzc8B5OdaMh1ty3mkuWRg4sCFiDQQ==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.1.tgz",
+      "integrity": "sha512-CXe0JHCXru8Fa36dteXqmH2YxngKJjkQLjxzoj6LYwzZ7qZvgsLSc+eqItCrqIop8Vl2UKoAi0StVWu97FQZIQ==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.7.0",
-        "@typescript-eslint/visitor-keys": "7.7.0",
+        "@typescript-eslint/types": "7.7.1",
+        "@typescript-eslint/visitor-keys": "7.7.1",
         "debug": "^4.3.4",
         "globby": "^11.1.0",
         "is-glob": "^4.0.3",
@@ -1354,17 +1354,17 @@
       }
     },
     "node_modules/@typescript-eslint/utils": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.7.0.tgz",
-      "integrity": "sha512-LKGAXMPQs8U/zMRFXDZOzmMKgFv3COlxUQ+2NMPhbqgVm6R1w+nU1i4836Pmxu9jZAuIeyySNrN/6Rc657ggig==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.7.1.tgz",
+      "integrity": "sha512-QUvBxPEaBXf41ZBbaidKICgVL8Hin0p6prQDu6bbetWo39BKbWJxRsErOzMNT1rXvTll+J7ChrbmMCXM9rsvOQ==",
       "dev": true,
       "dependencies": {
         "@eslint-community/eslint-utils": "^4.4.0",
         "@types/json-schema": "^7.0.15",
         "@types/semver": "^7.5.8",
-        "@typescript-eslint/scope-manager": "7.7.0",
-        "@typescript-eslint/types": "7.7.0",
-        "@typescript-eslint/typescript-estree": "7.7.0",
+        "@typescript-eslint/scope-manager": "7.7.1",
+        "@typescript-eslint/types": "7.7.1",
+        "@typescript-eslint/typescript-estree": "7.7.1",
         "semver": "^7.6.0"
       },
       "engines": {
@@ -1379,12 +1379,12 @@
       }
     },
     "node_modules/@typescript-eslint/visitor-keys": {
-      "version": "7.7.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.0.tgz",
-      "integrity": "sha512-h0WHOj8MhdhY8YWkzIF30R379y0NqyOHExI9N9KCzvmu05EgG4FumeYa3ccfKUSphyWkWQE1ybVrgz/Pbam6YA==",
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.1.tgz",
+      "integrity": "sha512-gBL3Eq25uADw1LQ9kVpf3hRM+DWzs0uZknHYK3hq4jcTPqVCClHGDnB6UUUV2SFeBeA4KWHWbbLqmbGcZ4FYbw==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.7.0",
+        "@typescript-eslint/types": "7.7.1",
         "eslint-visitor-keys": "^3.4.3"
       },
       "engines": {
@@ -2077,9 +2077,9 @@
       }
     },
     "node_modules/clsx": {
-      "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz",
-      "integrity": "sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==",
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz",
+      "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==",
       "engines": {
         "node": ">=6"
       }
@@ -2345,9 +2345,9 @@
       }
     },
     "node_modules/electron-to-chromium": {
-      "version": "1.4.745",
-      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.745.tgz",
-      "integrity": "sha512-tRbzkaRI5gbUn5DEvF0dV4TQbMZ5CLkWeTAXmpC9IrYT+GE+x76i9p+o3RJ5l9XmdQlI1pPhVtE9uNcJJ0G0EA==",
+      "version": "1.4.748",
+      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.748.tgz",
+      "integrity": "sha512-VWqjOlPZn70UZ8FTKUOkUvBLeTQ0xpty66qV0yJcAGY2/CthI4xyW9aEozRVtuwv3Kpf5xTesmJUcPwuJmgP4A==",
       "dev": true
     },
     "node_modules/enhanced-resolve": {
@@ -2465,14 +2465,14 @@
       }
     },
     "node_modules/es-iterator-helpers": {
-      "version": "1.0.18",
-      "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.18.tgz",
-      "integrity": "sha512-scxAJaewsahbqTYrGKJihhViaM6DDZDDoucfvzNbK0pOren1g/daDQ3IAhzn+1G14rBG7w+i5N+qul60++zlKA==",
+      "version": "1.0.19",
+      "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.19.tgz",
+      "integrity": "sha512-zoMwbCcH5hwUkKJkT8kDIBZSz9I6mVG//+lDCinLCGov4+r7NIy0ld8o03M0cJxl2spVf6ESYVS6/gpIfq1FFw==",
       "dev": true,
       "dependencies": {
         "call-bind": "^1.0.7",
         "define-properties": "^1.2.1",
-        "es-abstract": "^1.23.0",
+        "es-abstract": "^1.23.3",
         "es-errors": "^1.3.0",
         "es-set-tostringtag": "^2.0.3",
         "function-bind": "^1.1.2",
@@ -3025,9 +3025,9 @@
       }
     },
     "node_modules/formik": {
-      "version": "2.4.5",
-      "resolved": "https://registry.npmjs.org/formik/-/formik-2.4.5.tgz",
-      "integrity": "sha512-Gxlht0TD3vVdzMDHwkiNZqJ7Mvg77xQNfmBRrNtvzcHZs72TJppSTDKHpImCMJZwcWPBJ8jSQQ95GJzXFf1nAQ==",
+      "version": "2.4.6",
+      "resolved": "https://registry.npmjs.org/formik/-/formik-2.4.6.tgz",
+      "integrity": "sha512-A+2EI7U7aG296q2TLGvNapDNTZp1khVt5Vk0Q/fyfSROss0V/V6+txt2aJnwEos44IxTCW/LYAi/zgWzlevj+g==",
       "funding": [
         {
           "type": "individual",

+ 48 - 44
frontend/taipy/src/CoreSelector.tsx

@@ -73,7 +73,7 @@ interface CoreSelectorProps {
     displayCycles?: boolean;
     showPrimaryFlag?: boolean;
     propagate?: boolean;
-    value?: string;
+    value?: string | string[];
     defaultValue?: string;
     height: string;
     libClassName?: string;
@@ -84,7 +84,7 @@ interface CoreSelectorProps {
     leafType: NodeType;
     editComponent?: ComponentType<EditProps>;
     showPins?: boolean;
-    onSelect?: (id: string) => void;
+    onSelect?: (id: string | string[]) => void;
 }
 
 const tinyPinIconButtonSx = (theme: Theme) => ({
@@ -233,7 +233,7 @@ const getExpandedIds = (nodeId: string, exp?: string[], entities?: Entities) =>
         const res = ret[1].map((r) => r[0]);
         return exp ? [...exp, ...res] : res;
     }
-    return exp;
+    return exp || [];
 };
 
 const CoreSelector = (props: CoreSelectorProps) => {
@@ -256,80 +256,83 @@ const CoreSelector = (props: CoreSelectorProps) => {
         coreChanged,
     } = props;
 
-    const [selected, setSelected] = useState("");
+    const [selectedItems, setSelectedItems] = useState<string[]>([]);
     const [pins, setPins] = useState<[Pinned, Pinned]>([{}, {}]);
     const [hideNonPinned, setShowPinned] = useState(false);
-    const [expandedItems, setExpandedItems] = useState<string[]>();
+    const [expandedItems, setExpandedItems] = useState<string[]>([]);
 
     const dispatch = useDispatch();
     const module = useModule();
 
     useDispatchRequestUpdateOnFirstRender(dispatch, id, module, updateVars, undefined, true);
 
+    const onItemExpand = useCallback((e: SyntheticEvent, itemId: string, expanded: boolean) => {
+        setExpandedItems((old) => {
+            if (!expanded) {
+                return old.filter((id) => id != itemId);
+            }
+            return [...old, itemId];
+        });
+    }, []);
+
     const onNodeSelect = useCallback(
         (e: SyntheticEvent, nodeId: string, isSelected: boolean) => {
-            if (!isSelected) {
+            const { selectable = "false" } = e.currentTarget.parentElement?.dataset || {};
+            const isSelectable = selectable === "true";
+            if (!isSelectable && multiple) {
                 return;
             }
-            const { selectable = "false" } = e.currentTarget.parentElement?.dataset || {};
-            const scenariosVar = getUpdateVar(updateVars, lovPropertyName);
-            dispatch(
-                createSendUpdateAction(
-                    updateVarName,
-                    selectable === "true" ? nodeId : undefined,
-                    module,
-                    onChange,
-                    propagate,
-                    scenariosVar
-                )
-            );
-            setSelected(nodeId);
-            onSelect && selectable && onSelect(nodeId);
+            setSelectedItems((old) => {
+                const res = isSelected ? [...old, nodeId] : old.filter((id) => id !== nodeId);
+                const scenariosVar = getUpdateVar(updateVars, lovPropertyName);
+                const val = multiple ? res : isSelectable ? nodeId : "";
+                setTimeout(() => dispatch(createSendUpdateAction(updateVarName, val, module, onChange, propagate, scenariosVar)), 1);
+                onSelect && isSelectable && onSelect(val);
+                return res;
+            });
         },
-        [updateVarName, updateVars, onChange, onSelect, propagate, dispatch, module, lovPropertyName]
+        [updateVarName, updateVars, onChange, onSelect, multiple, propagate, dispatch, module, lovPropertyName]
     );
 
-    const unselect = useCallback(() => {
-        setSelected((sel) => {
-            if (sel) {
-                const lovVar = getUpdateVar(updateVars, lovPropertyName);
-                dispatch(createSendUpdateAction(updateVarName, undefined, module, onChange, propagate, lovVar));
-                return "";
-            }
-            return sel;
-        });
-    }, [updateVarName, updateVars, onChange, propagate, dispatch, module, lovPropertyName]);
-
     useEffect(() => {
         if (value !== undefined && value !== null) {
-            setSelected(value);
-            setExpandedItems((exp) => getExpandedIds(value, exp, props.entities));
+            setSelectedItems(Array.isArray(value) ? value : value ? [value]: []);
+            setExpandedItems((exp) => typeof value === "string" ? getExpandedIds(value, exp, props.entities) : exp);
         } else if (defaultValue) {
             try {
                 const parsedValue = JSON.parse(defaultValue);
                 if (Array.isArray(parsedValue)) {
-                    if (parsedValue.length) {
-                        setSelected(parsedValue[0]);
+                    setSelectedItems(parsedValue);
+                    if (parsedValue.length > 1) {
                         setExpandedItems((exp) => getExpandedIds(parsedValue[0], exp, props.entities));
                     }
                 } else {
-                    setSelected(parsedValue);
+                    setSelectedItems([parsedValue]);
                     setExpandedItems((exp) => getExpandedIds(parsedValue, exp, props.entities));
                 }
             } catch {
-                setSelected(defaultValue);
+                setSelectedItems([defaultValue]);
                 setExpandedItems((exp) => getExpandedIds(defaultValue, exp, props.entities));
             }
         } else if (value === null) {
-            setSelected("");
+            setSelectedItems([]);
         }
     }, [defaultValue, value, props.entities]);
 
     useEffect(() => {
         if (entities && !entities.length) {
-            unselect();
-        }
-    }, [entities, unselect]);
+            setSelectedItems((old) => {
+                if (old.length) {
+                    const lovVar = getUpdateVar(updateVars, lovPropertyName);
+                    setTimeout(() => dispatch(
+                        createSendUpdateAction(updateVarName, multiple ? [] : "", module, onChange, propagate, lovVar)
+                    ), 1);
+                    return [];
+                }
+                return old;
+            });
+            }
+    }, [entities, updateVars, lovPropertyName, updateVarName, multiple, module, onChange, propagate, dispatch]);
 
     // Refresh on broadcast
     useEffect(() => {
@@ -417,9 +420,10 @@ const CoreSelector = (props: CoreSelectorProps) => {
                 slots={treeSlots}
                 sx={treeViewSx}
                 onItemSelectionToggle={onNodeSelect}
-                selectedItems={selected}
-                multiSelect={multiple && !multiple}
+                selectedItems={selectedItems}
+                multiSelect={multiple}
                 expandedItems={expandedItems}
+                onItemExpansionToggle={onItemExpand}
             >
                 {entities
                     ? entities.map((item) => (

+ 7 - 1
frontend/taipy/src/DataNodeTable.tsx

@@ -51,6 +51,7 @@ interface DataNodeTableProps {
     editInProgress?: boolean;
     editLock: MutableRefObject<boolean>;
     editable: boolean;
+    idVar?: string;
 }
 
 const pushRightSx = { ml: "auto" };
@@ -117,7 +118,11 @@ const DataNodeTable = (props: DataNodeTableProps) => {
         [nodeId, dispatch, module, props.onLock, props.editLock]
     );
 
-    const userData = useMemo(() => ({ dn_id: nodeId, comment: "" }), [nodeId]);
+    const userData = useMemo(() => {
+        const ret: Record<string, unknown> = {dn_id: nodeId, comment: ""};
+        props.idVar && (ret.context = { [props.idVar]: nodeId });
+        return ret
+    }, [nodeId, props.idVar]);
     const [comment, setComment] = useState("");
     const changeComment = useCallback(
         (e: ChangeEvent<HTMLInputElement>) => {
@@ -198,6 +203,7 @@ const DataNodeTable = (props: DataNodeTableProps) => {
                 onEdit={tableEdit ? props.onEdit : undefined}
                 filter={true}
                 libClassName="taipy-table"
+                pageSize={25}
             />
         </>
     );

+ 341 - 209
frontend/taipy/src/DataNodeViewer.tsx

@@ -23,10 +23,10 @@ import React, {
     MouseEvent,
     useRef,
 } from "react";
-import { CheckCircle, Cancel, ArrowForwardIosSharp, Launch, LockOutlined } from "@mui/icons-material";
 import Accordion from "@mui/material/Accordion";
 import AccordionDetails from "@mui/material/AccordionDetails";
 import AccordionSummary from "@mui/material/AccordionSummary";
+import Alert from "@mui/material/Alert";
 import Box from "@mui/material/Box";
 import Divider from "@mui/material/Divider";
 import Grid from "@mui/material/Grid";
@@ -39,6 +39,13 @@ import Tabs from "@mui/material/Tabs";
 import TextField from "@mui/material/TextField";
 import Tooltip from "@mui/material/Tooltip";
 import Typography from "@mui/material/Typography";
+
+import CheckCircle from "@mui/icons-material/CheckCircle";
+import Cancel from "@mui/icons-material/Cancel";
+import ArrowForwardIosSharp from "@mui/icons-material/ArrowForwardIosSharp";
+import Launch from "@mui/icons-material/Launch";
+import LockOutlined from "@mui/icons-material/LockOutlined";
+
 import { DateTimePicker } from "@mui/x-date-pickers/DateTimePicker";
 import { BaseDateTimePickerSlotProps } from "@mui/x-date-pickers/DateTimePicker/shared";
 import { LocalizationProvider } from "@mui/x-date-pickers/LocalizationProvider";
@@ -67,13 +74,14 @@ import {
     IconPaddingSx,
     MainBoxSx,
     TableViewType,
+    getUpdateVarNames,
     hoverSx,
     iconLabelSx,
     popoverOrigin,
     tinySelPinIconButtonSx,
     useClassNames,
 } from "./utils";
-import PropertiesEditor from "./PropertiesEditor";
+import PropertiesEditor, { DatanodeProperties } from "./PropertiesEditor";
 import { NodeType, Scenarios } from "./utils/types";
 import CoreSelector from "./CoreSelector";
 import { useUniqueId } from "./utils/hooks";
@@ -103,7 +111,7 @@ type DataNodeFull = [
     string, // ownerId
     string, // ownerLabel
     number, // ownerType
-    Array<[string, string]>, // properties
+    DatanodeData, // data
     boolean, // editInProgress
     string, // editorId
     boolean, // readable
@@ -120,7 +128,7 @@ enum DataNodeFullProps {
     ownerId,
     ownerLabel,
     ownerType,
-    properties,
+    data,
     editInProgress,
     editorId,
     readable,
@@ -145,7 +153,6 @@ interface DataNodeViewerProps {
     defaultDataNode?: string;
     dataNode?: DataNodeFull | Array<DataNodeFull>;
     onEdit?: string;
-    onIdSelect?: string;
     error?: string;
     coreChanged?: Record<string, unknown>;
     defaultActive: boolean;
@@ -166,11 +173,13 @@ interface DataNodeViewerProps {
     data?: DatanodeData;
     tabularData?: TableValueType;
     tabularColumns?: string;
+    dnProperties?: DatanodeProperties;
     onDataValue?: string;
     onTabularDataEdit?: string;
     chartConfig?: string;
     width?: string;
     onLock?: string;
+    updateDnVars?: string;
 }
 
 const dataValueFocus = "data-value";
@@ -184,7 +193,22 @@ const getValidDataNode = (datanode: DataNodeFull | DataNodeFull[]) =>
         ? (datanode[0] as DataNodeFull)
         : undefined;
 
-const invalidDatanode: DataNodeFull = ["", "", "", "", "", "", "", "", -1, [], false, "", false, false];
+const invalidDatanode: DataNodeFull = [
+    "",
+    "",
+    "",
+    "",
+    "",
+    "",
+    "",
+    "",
+    -1,
+    [null, null, null, null],
+    false,
+    "",
+    false,
+    false,
+];
 
 enum TabValues {
     Data,
@@ -204,6 +228,8 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
         showProperties = true,
         showHistory = true,
         showData = true,
+        updateVars = "",
+        updateDnVars = "",
     } = props;
 
     const { state, dispatch } = useContext<Store>(Context);
@@ -224,12 +250,16 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
         dnOwnerId,
         dnOwnerLabel,
         dnOwnerType,
-        dnProperties,
+        dnData,
         dnEditInProgress,
         dnEditorId,
         dnReadable,
         dnEditable,
     ] = datanode;
+    const dtType = dnData[DatanodeDataProps.type];
+    const dtValue = dnData[DatanodeDataProps.value] ?? (dtType == "float" ? null : undefined);
+    const dtTabular = dnData[DatanodeDataProps.tabular] ?? false;
+    const dtError = dnData[DatanodeDataProps.error];
 
     // Tabs
     const [tabValue, setTabValue] = useState<TabValues>(TabValues.Data);
@@ -237,25 +267,64 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
         (_: SyntheticEvent, newValue: number) => {
             if (valid) {
                 if (newValue == TabValues.History) {
-                    setHistoryRequested(
-                        (req) =>
-                            req ||
-                            dispatch(createSendActionNameAction(id, module, props.onIdSelect, { history_id: dnId })) ||
-                            true
-                    );
+                    setHistoryRequested((req) => {
+                        if (!req) {
+                            const idVar = getUpdateVar(updateDnVars, "history_id");
+                            dispatch(
+                                createRequestUpdateAction(
+                                    id,
+                                    module,
+                                    getUpdateVarNames(updateVars, "history"),
+                                    true,
+                                    idVar ? { [idVar]: dnId } : undefined
+                                )
+                            );
+                        }
+                        return true;
+                    });
+                    setDataRequested(false);
+                    setPropertiesRequested(false);
                 } else if (newValue == TabValues.Data) {
-                    setDataRequested(
-                        (req) =>
-                            req ||
-                            dispatch(createSendActionNameAction(id, module, props.onIdSelect, { data_id: dnId })) ||
-                            true
-                    );
+                    setDataRequested((req) => {
+                        if (!req && dtTabular) {
+                            const idVar = getUpdateVar(updateDnVars, "data_id");
+                            dispatch(
+                                createRequestUpdateAction(
+                                    id,
+                                    module,
+                                    getUpdateVarNames(updateVars, "tabularData", "tabularColumns"),
+                                    true,
+                                    idVar ? { [idVar]: dnId } : undefined
+                                )
+                            );
+                        }
+                        return true;
+                    });
+                    setHistoryRequested(false);
+                    setPropertiesRequested(false);
+                } else if (newValue == TabValues.Properties) {
+                    setPropertiesRequested((req) => {
+                        if (!req) {
+                            const idVar = getUpdateVar(updateDnVars, "properties_id");
+                            dispatch(
+                                createRequestUpdateAction(
+                                    id,
+                                    module,
+                                    getUpdateVarNames(updateVars, "properties"),
+                                    true,
+                                    idVar ? { [idVar]: dnId } : undefined
+                                )
+                            );
+                        }
+                        return true;
+                    });
+                    setDataRequested(false);
                     setHistoryRequested(false);
                 }
                 setTabValue(newValue);
             }
         },
-        [dnId, dispatch, id, valid, module, props.onIdSelect]
+        [dnId, dispatch, id, valid, module, updateVars, updateDnVars, dtTabular]
     );
 
     useEffect(() => {
@@ -278,11 +347,16 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
             const isNewDn = oldDn[DataNodeFullProps.id] !== newDnId;
             // clean lock on change
             if (oldDn[DataNodeFullProps.id] && isNewDn && editLock.current) {
-                dispatch(
-                    createSendActionNameAction(id, module, props.onLock, {
-                        id: oldDn[DataNodeFullProps.id],
-                        lock: false,
-                    })
+                const oldId = oldDn[DataNodeFullProps.id];
+                setTimeout(
+                    () =>
+                        dispatch(
+                            createSendActionNameAction(id, module, props.onLock, {
+                                id: oldId,
+                                lock: false,
+                            })
+                        ),
+                    1
                 );
             }
             if (!dn || isNewDn) {
@@ -294,21 +368,62 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
             editLock.current = dn[DataNodeFullProps.editInProgress];
             setHistoryRequested((req) => {
                 if (req && !isNewDn && tabValue == TabValues.History) {
-                    dispatch(
-                        createSendActionNameAction(id, module, props.onIdSelect, {
-                            history_id: newDnId,
-                        })
+                    const idVar = getUpdateVar(updateDnVars, "history_id");
+                    const vars = getUpdateVarNames(updateVars, "history");
+                    setTimeout(
+                        () =>
+                            dispatch(
+                                createRequestUpdateAction(
+                                    id,
+                                    module,
+                                    vars,
+                                    true,
+                                    idVar ? { [idVar]: newDnId } : undefined
+                                )
+                            ),
+                        1
                     );
                     return true;
                 }
                 return false;
             });
             setDataRequested(() => {
-                if (tabValue == TabValues.Data) {
-                    dispatch(
-                        createSendActionNameAction(id, module, props.onIdSelect, {
-                            data_id: newDnId,
-                        })
+                if (showData && tabValue == TabValues.Data && dn[DataNodeFullProps.data][DatanodeDataProps.tabular]) {
+                    const idVar = getUpdateVar(updateDnVars, "data_id");
+                    const vars = getUpdateVarNames(updateVars, "tabularData", "tabularColumns");
+                    setTimeout(
+                        () =>
+                            dispatch(
+                                createRequestUpdateAction(
+                                    id,
+                                    module,
+                                    vars,
+                                    true,
+                                    idVar ? { [idVar]: newDnId } : undefined
+                                )
+                            ),
+                        1
+                    );
+                    return true;
+                }
+                return false;
+            });
+            setPropertiesRequested((req) => {
+                if ((req || !showData) && tabValue == TabValues.Properties) {
+                    const idVar = getUpdateVar(updateDnVars, "properties_id");
+                    const vars = getUpdateVarNames(updateVars, "properties");
+                    setTimeout(
+                        () =>
+                            dispatch(
+                                createRequestUpdateAction(
+                                    id,
+                                    module,
+                                    vars,
+                                    true,
+                                    idVar ? { [idVar]: newDnId } : undefined
+                                )
+                            ),
+                        1
                     );
                     return true;
                 }
@@ -320,7 +435,7 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
             return dn;
         });
         // eslint-disable-next-line react-hooks/exhaustive-deps
-    }, [props.dataNode, props.defaultDataNode, showData, id, dispatch, module, props.onLock, props.onIdSelect]);
+    }, [props.dataNode, props.defaultDataNode, showData, id, dispatch, module, props.onLock]);
 
     // clean lock on unmount
     useEffect(
@@ -338,6 +453,7 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
     // history & data
     const [historyRequested, setHistoryRequested] = useState(false);
     const [dataRequested, setDataRequested] = useState(false);
+    const [propertiesRequested, setPropertiesRequested] = useState(false);
 
     // userExpanded
     const [userExpanded, setUserExpanded] = useState(valid && expanded);
@@ -378,48 +494,49 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
             setLabel(dnLabel);
             setFocusName("");
         },
-        [dnLabel, setLabel, setFocusName]
+        [dnLabel]
     );
     const onLabelChange = useCallback((e: ChangeEvent<HTMLInputElement>) => setLabel(e.target.value), []);
 
     // scenarios
     const [anchorEl, setAnchorEl] = useState<HTMLElement | null>(null);
+    const scenarioUpdateVars = useMemo(() => getUpdateVarNames(updateVars, "scenario", "scenarios"), [updateVars]);
     const showScenarios = useCallback(
         (e: MouseEvent<HTMLElement>) => {
             e.stopPropagation();
             if (valid) {
-                dispatch(createSendActionNameAction(id, module, props.onIdSelect, { owner_id: dnOwnerId }));
+                const ownerIdVar = getUpdateVar(updateDnVars, "owner_id");
+                dispatch(
+                    createRequestUpdateAction(
+                        id,
+                        module,
+                        scenarioUpdateVars,
+                        true,
+                        ownerIdVar ? { [ownerIdVar]: dnOwnerId } : undefined
+                    )
+                );
                 setAnchorEl(e.currentTarget);
             }
         },
-        [dnOwnerId, dispatch, id, valid, module, props.onIdSelect]
+        [dnOwnerId, valid, updateDnVars, scenarioUpdateVars, dispatch, id, module]
     );
     const handleClose = useCallback(() => setAnchorEl(null), []);
-    const scenarioUpdateVars = useMemo(
-        () => [getUpdateVar(props.updateVars, "scenario"), getUpdateVar(props.updateVars, "scenarios")],
-        [props.updateVars]
-    );
 
     const [comment, setComment] = useState("");
-    const changeComment = useCallback((e: ChangeEvent<HTMLInputElement>) => {
-        setComment(e.currentTarget.value);
-    }, []);
+    const changeComment = useCallback((e: ChangeEvent<HTMLInputElement>) => setComment(e.currentTarget.value), []);
 
     // on datanode change
     useEffect(() => {
         setLabel(dnLabel);
         setUserExpanded(expanded && valid);
         setHistoryRequested(false);
-        setDataRequested(true);
+        setDataRequested(showData);
+        setPropertiesRequested(!showData);
         setViewType(TableViewType);
         setComment("");
-    }, [dnId, dnLabel, valid, expanded]);
+    }, [dnId, dnLabel, valid, expanded, showData]);
 
     // Datanode data
-    const dtType = props.data && props.data[DatanodeDataProps.type];
-    const dtValue = (props.data && props.data[DatanodeDataProps.value]) ?? (dtType == "float" ? null : undefined);
-    const dtTabular = (props.data && props.data[DatanodeDataProps.tabular]) ?? false;
-    const dtError = props.data && props.data[DatanodeDataProps.error];
     const [dataValue, setDataValue] = useState<RowValue | Date>();
     const editDataValue = useCallback(
         (e: MouseEvent<HTMLElement>) => {
@@ -445,7 +562,7 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
             setFocusName("");
             dispatch(createSendActionNameAction(id, module, props.onLock, { id: dnId, lock: false }));
         },
-        [dtValue, dtType, dnId, id, dispatch, module, props.onLock, setDataValue, setFocusName]
+        [dtValue, dtType, dnId, id, dispatch, module, props.onLock]
     );
     const onDataValueChange = useCallback((e: ChangeEvent<HTMLInputElement>) => setDataValue(e.target.value), []);
     const onDataValueDateChange = useCallback((d: Date | null) => d && setDataValue(d), []);
@@ -460,11 +577,20 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
     const onViewTypeChange = useCallback(
         (e: MouseEvent, value?: string) => {
             if (value) {
-                dispatch(createSendActionNameAction(id, module, props.onIdSelect, { chart_id: dnId }));
+                const idVar = getUpdateVar(updateDnVars, "chart_id");
+                dispatch(
+                    createRequestUpdateAction(
+                        id,
+                        module,
+                        getUpdateVarNames(updateVars, "chartConfig"),
+                        true,
+                        idVar ? { [idVar]: dnId } : undefined
+                    )
+                );
                 setViewType(value);
             }
         },
-        [dnId, dispatch, id, module, props.onIdSelect]
+        [dnId, updateVars, updateDnVars, dispatch, id, module]
     );
 
     // base tabular columns
@@ -694,7 +820,11 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
                                     entityId={dnId}
                                     active={active}
                                     isDefined={valid}
-                                    entProperties={dnProperties}
+                                    entProperties={
+                                        propertiesRequested && Array.isArray(props.dnProperties)
+                                            ? props.dnProperties
+                                            : []
+                                    }
                                     show={showProperties}
                                     focusName={focusName}
                                     setFocusName={setFocusName}
@@ -757,161 +887,162 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
                             id={`${uniqid}-dn-tabpanel-data`}
                             aria-labelledby={`${uniqid}-data`}
                         >
-                            {dataRequested ? (
-                                dtValue !== undefined ? (
-                                    <Grid container justifyContent="space-between" spacing={1}>
-                                        <Grid
-                                            item
-                                            container
-                                            xs={12}
-                                            justifyContent="space-between"
-                                            data-focus={dataValueFocus}
-                                            onClick={onFocus}
-                                            sx={hoverSx}
-                                        >
-                                            {active &&
-                                            dnEditable &&
-                                            dnEditInProgress &&
-                                            dnEditorId === editorId &&
-                                            focusName === dataValueFocus ? (
-                                                <>
+                            {dtValue !== undefined ? (
+                                <Grid container justifyContent="space-between" spacing={1}>
+                                    <Grid
+                                        item
+                                        container
+                                        xs={12}
+                                        justifyContent="space-between"
+                                        data-focus={dataValueFocus}
+                                        onClick={onFocus}
+                                        sx={hoverSx}
+                                    >
+                                        {active &&
+                                        dnEditable &&
+                                        dnEditInProgress &&
+                                        dnEditorId === editorId &&
+                                        focusName === dataValueFocus ? (
+                                            <>
+                                                {typeof dtValue == "boolean" ? (
+                                                    <>
+                                                        <Grid item xs={10}>
+                                                            <Switch
+                                                                value={dataValue as boolean}
+                                                                onChange={onDataValueChange}
+                                                            />
+                                                        </Grid>
+                                                        <Grid item xs={2}>
+                                                            <Tooltip title="Apply">
+                                                                <IconButton
+                                                                    onClick={editDataValue}
+                                                                    size="small"
+                                                                    sx={IconPaddingSx}
+                                                                >
+                                                                    <CheckCircle color="primary" />
+                                                                </IconButton>
+                                                            </Tooltip>
+                                                            <Tooltip title="Cancel">
+                                                                <IconButton
+                                                                    onClick={cancelDataValue}
+                                                                    size="small"
+                                                                    sx={IconPaddingSx}
+                                                                >
+                                                                    <Cancel color="inherit" />
+                                                                </IconButton>
+                                                            </Tooltip>
+                                                        </Grid>
+                                                    </>
+                                                ) : dtType == "date" &&
+                                                  (dataValue === null || dataValue instanceof Date) ? (
+                                                    <LocalizationProvider dateAdapter={AdapterDateFns}>
+                                                        <Grid item xs={10}>
+                                                            <DateTimePicker
+                                                                value={dataValue as Date}
+                                                                onChange={onDataValueDateChange}
+                                                                slotProps={textFieldProps}
+                                                            />
+                                                        </Grid>
+                                                        <Grid item xs={2}>
+                                                            <Tooltip title="Apply">
+                                                                <IconButton
+                                                                    onClick={editDataValue}
+                                                                    size="small"
+                                                                    sx={IconPaddingSx}
+                                                                >
+                                                                    <CheckCircle color="primary" />
+                                                                </IconButton>
+                                                            </Tooltip>
+                                                            <Tooltip title="Cancel">
+                                                                <IconButton
+                                                                    onClick={cancelDataValue}
+                                                                    size="small"
+                                                                    sx={IconPaddingSx}
+                                                                >
+                                                                    <Cancel color="inherit" />
+                                                                </IconButton>
+                                                            </Tooltip>
+                                                        </Grid>
+                                                    </LocalizationProvider>
+                                                ) : (
+                                                    <TextField
+                                                        label="Value"
+                                                        variant="outlined"
+                                                        fullWidth
+                                                        sx={FieldNoMaxWidth}
+                                                        value={dataValue || ""}
+                                                        onChange={onDataValueChange}
+                                                        type={
+                                                            typeof dtValue == "number"
+                                                                ? "number"
+                                                                : dtType == "float" && dtValue === null
+                                                                ? "number"
+                                                                : undefined
+                                                        }
+                                                        InputProps={{
+                                                            endAdornment: (
+                                                                <InputAdornment position="end">
+                                                                    <Tooltip title="Apply">
+                                                                        <IconButton
+                                                                            sx={IconPaddingSx}
+                                                                            onClick={editDataValue}
+                                                                            size="small"
+                                                                        >
+                                                                            <CheckCircle color="primary" />
+                                                                        </IconButton>
+                                                                    </Tooltip>
+                                                                    <Tooltip title="Cancel">
+                                                                        <IconButton
+                                                                            sx={IconPaddingSx}
+                                                                            onClick={cancelDataValue}
+                                                                            size="small"
+                                                                        >
+                                                                            <Cancel color="inherit" />
+                                                                        </IconButton>
+                                                                    </Tooltip>
+                                                                </InputAdornment>
+                                                            ),
+                                                        }}
+                                                        disabled={!valid}
+                                                    />
+                                                )}
+                                                <TextField
+                                                    value={comment}
+                                                    onChange={changeComment}
+                                                    label="Comment"
+                                                ></TextField>
+                                            </>
+                                        ) : (
+                                            <>
+                                                <Grid item xs={4}>
+                                                    <Typography variant="subtitle2">Value</Typography>
+                                                </Grid>
+                                                <Grid item xs={8}>
                                                     {typeof dtValue == "boolean" ? (
-                                                        <>
-                                                            <Grid item xs={10}>
-                                                                <Switch
-                                                                    value={dataValue as boolean}
-                                                                    onChange={onDataValueChange}
-                                                                />
-                                                            </Grid>
-                                                            <Grid item xs={2}>
-                                                                <Tooltip title="Apply">
-                                                                    <IconButton
-                                                                        onClick={editDataValue}
-                                                                        size="small"
-                                                                        sx={IconPaddingSx}
-                                                                    >
-                                                                        <CheckCircle color="primary" />
-                                                                    </IconButton>
-                                                                </Tooltip>
-                                                                <Tooltip title="Cancel">
-                                                                    <IconButton
-                                                                        onClick={cancelDataValue}
-                                                                        size="small"
-                                                                        sx={IconPaddingSx}
-                                                                    >
-                                                                        <Cancel color="inherit" />
-                                                                    </IconButton>
-                                                                </Tooltip>
-                                                            </Grid>
-                                                        </>
-                                                    ) : dtType == "date" && (dataValue === null || dataValue instanceof Date)  ? (
-                                                        <LocalizationProvider dateAdapter={AdapterDateFns}>
-                                                            <Grid item xs={10}>
-                                                                <DateTimePicker
-                                                                    value={dataValue as Date}
-                                                                    onChange={onDataValueDateChange}
-                                                                    slotProps={textFieldProps}
-                                                                />
-                                                            </Grid>
-                                                            <Grid item xs={2}>
-                                                                <Tooltip title="Apply">
-                                                                    <IconButton
-                                                                        onClick={editDataValue}
-                                                                        size="small"
-                                                                        sx={IconPaddingSx}
-                                                                    >
-                                                                        <CheckCircle color="primary" />
-                                                                    </IconButton>
-                                                                </Tooltip>
-                                                                <Tooltip title="Cancel">
-                                                                    <IconButton
-                                                                        onClick={cancelDataValue}
-                                                                        size="small"
-                                                                        sx={IconPaddingSx}
-                                                                    >
-                                                                        <Cancel color="inherit" />
-                                                                    </IconButton>
-                                                                </Tooltip>
-                                                            </Grid>
-                                                        </LocalizationProvider>
-                                                    ) : (
-                                                        <TextField
-                                                            label="Value"
-                                                            variant="outlined"
-                                                            fullWidth
-                                                            sx={FieldNoMaxWidth}
-                                                            value={dataValue || ""}
-                                                            onChange={onDataValueChange}
-                                                            type={
-                                                                typeof dtValue == "number"
-                                                                    ? "number"
-                                                                    : dtType == "float" && dtValue === null
-                                                                    ? "number"
-                                                                    : undefined
-                                                            }
-                                                            InputProps={{
-                                                                endAdornment: (
-                                                                    <InputAdornment position="end">
-                                                                        <Tooltip title="Apply">
-                                                                            <IconButton
-                                                                                sx={IconPaddingSx}
-                                                                                onClick={editDataValue}
-                                                                                size="small"
-                                                                            >
-                                                                                <CheckCircle color="primary" />
-                                                                            </IconButton>
-                                                                        </Tooltip>
-                                                                        <Tooltip title="Cancel">
-                                                                            <IconButton
-                                                                                sx={IconPaddingSx}
-                                                                                onClick={cancelDataValue}
-                                                                                size="small"
-                                                                            >
-                                                                                <Cancel color="inherit" />
-                                                                            </IconButton>
-                                                                        </Tooltip>
-                                                                    </InputAdornment>
-                                                                ),
-                                                            }}
-                                                            disabled={!valid}
+                                                        <Switch
+                                                            defaultChecked={dtValue}
+                                                            disabled={true}
+                                                            title={`${dtValue}`}
                                                         />
+                                                    ) : (
+                                                        <Typography variant="subtitle2">
+                                                            {dtType == "date"
+                                                                ? (dataValue === null || dataValue instanceof Date) &&
+                                                                  format(dataValue as Date, "yyyy/MM/dd HH:mm:ss")
+                                                                : dtType == "float" && dtValue === null
+                                                                ? "NaN"
+                                                                : dtValue}
+                                                        </Typography>
                                                     )}
-                                                    <TextField
-                                                        value={comment}
-                                                        onChange={changeComment}
-                                                        label="Comment"
-                                                    ></TextField>
-                                                </>
-                                            ) : (
-                                                <>
-                                                    <Grid item xs={4}>
-                                                        <Typography variant="subtitle2">Value</Typography>
-                                                    </Grid>
-                                                    <Grid item xs={8}>
-                                                        {typeof dtValue == "boolean" ? (
-                                                            <Switch
-                                                                defaultChecked={dtValue}
-                                                                disabled={true}
-                                                                title={`${dtValue}`}
-                                                            />
-                                                        ) : (
-                                                            <Typography variant="subtitle2">
-                                                                {dtType == "date"
-                                                                    ? (dataValue === null || dataValue instanceof Date) &&
-                                                                      format(dataValue as Date, "yyyy/MM/dd HH:mm:ss")
-                                                                    : dtType == "float" && dtValue === null
-                                                                    ? "NaN"
-                                                                    : dtValue}
-                                                            </Typography>
-                                                        )}
-                                                    </Grid>
-                                                </>
-                                            )}
-                                        </Grid>
+                                                </Grid>
+                                            </>
+                                        )}
                                     </Grid>
-                                ) : dtError ? (
-                                    <Typography>{dtError}</Typography>
-                                ) : dtTabular ? (
+                                </Grid>
+                            ) : dtError ? (
+                                <Typography>{dtError}</Typography>
+                            ) : dtTabular ? (
+                                dataRequested ? (
                                     <>
                                         {viewType === TableViewType ? (
                                             <DataNodeTable
@@ -922,12 +1053,13 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
                                                 nodeId={dnId}
                                                 configId={dnConfig}
                                                 onViewTypeChange={onViewTypeChange}
-                                                updateVarName={getUpdateVar(props.updateVars, "tabularData")}
+                                                updateVarName={getUpdateVar(updateVars, "tabularData")}
                                                 onEdit={props.onTabularDataEdit}
                                                 onLock={props.onLock}
                                                 editInProgress={dnEditInProgress && dnEditorId !== editorId}
                                                 editLock={editLock}
                                                 editable={dnEditable}
+                                                idVar={getUpdateVar(updateDnVars, "data_id")}
                                             />
                                         ) : (
                                             <DataNodeChart
@@ -937,7 +1069,7 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
                                                 tabularData={props.tabularData}
                                                 configId={dnConfig}
                                                 defaultConfig={props.chartConfig}
-                                                updateVarName={getUpdateVar(props.updateVars, "tabularData")}
+                                                updateVarName={getUpdateVar(updateVars, "tabularData")}
                                                 chartConfigs={props.chartConfigs}
                                                 onViewTypeChange={onViewTypeChange}
                                             />
@@ -952,7 +1084,7 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
                         </div>
                     </AccordionDetails>
                 </Accordion>
-                <Box>{props.error}</Box>
+                {props.error ? <Alert severity="error">{props.error}</Alert> : null}
             </Box>
         </>
     );

+ 3 - 1
frontend/taipy/src/NodeSelector.tsx

@@ -36,10 +36,11 @@ interface NodeSelectorProps {
     className?: string;
     dynamicClassName?: string;
     showPins?: boolean;
+    multiple?: boolean;
 }
 
 const NodeSelector = (props: NodeSelectorProps) => {
-    const { showPins = true } = props;
+    const { showPins = true, multiple = false } = props;
     const className = useClassNames(props.libClassName, props.dynamicClassName, props.className);
     return (
         <Box sx={MainTreeBoxSx} id={props.id} className={className}>
@@ -49,6 +50,7 @@ const NodeSelector = (props: NodeSelectorProps) => {
                 leafType={NodeType.NODE}
                 lovPropertyName="datanodes"
                 showPins={showPins}
+                multiple={multiple}
             />
             <Box>{props.error}</Box>
         </Box>

+ 9 - 7
frontend/taipy/src/PropertiesEditor.tsx

@@ -36,12 +36,14 @@ type PropertiesEditPayload = {
     deleted_properties?: Array<Partial<Property>>;
 };
 
+export type DatanodeProperties = Array<[string, string]>;
+
 interface PropertiesEditorProps {
     id?: string;
     entityId: string;
     active: boolean;
     show: boolean;
-    entProperties: Array<[string, string]>;
+    entProperties: DatanodeProperties;
     onFocus: (e: MouseEvent<HTMLElement>) => void;
     focusName: string;
     setFocusName: (name: string) => void;
@@ -124,8 +126,8 @@ const PropertiesEditor = (props: PropertiesEditorProps) => {
                     e.stopPropagation();
                 }
             }
-
-        }, [editProperty, cancelProperty]
+        },
+        [editProperty, cancelProperty]
     );
 
     const deleteProperty = useCallback(
@@ -187,7 +189,7 @@ const PropertiesEditor = (props: PropertiesEditorProps) => {
                                                   data-name="key"
                                                   data-id={property.id}
                                                   onChange={updatePropertyField}
-                                                  inputProps={{onKeyDown}}
+                                                  inputProps={{ onKeyDown }}
                                               />
                                           </Grid>
                                           <Grid item xs={5}>
@@ -200,7 +202,7 @@ const PropertiesEditor = (props: PropertiesEditorProps) => {
                                                   data-name="value"
                                                   data-id={property.id}
                                                   onChange={updatePropertyField}
-                                                  inputProps={{onKeyDown, "data-enter": true}}
+                                                  inputProps={{ onKeyDown, "data-enter": true }}
                                               />
                                           </Grid>
                                           <Grid
@@ -293,7 +295,7 @@ const PropertiesEditor = (props: PropertiesEditorProps) => {
                                     variant="outlined"
                                     sx={FieldNoMaxWidth}
                                     disabled={!isDefined}
-                                    inputProps={{onKeyDown}}
+                                    inputProps={{ onKeyDown }}
                                 />
                             </Grid>
                             <Grid item xs={5}>
@@ -305,7 +307,7 @@ const PropertiesEditor = (props: PropertiesEditorProps) => {
                                     variant="outlined"
                                     sx={FieldNoMaxWidth}
                                     disabled={!isDefined}
-                                    inputProps={{onKeyDown, "data-enter": true}}
+                                    inputProps={{ onKeyDown, "data-enter": true }}
                                 />
                             </Grid>
                             <Grid

+ 20 - 7
frontend/taipy/src/ScenarioSelector.tsx

@@ -67,7 +67,7 @@ interface ScenarioSelectorProps {
     showPrimaryFlag?: boolean;
     updateVarName?: string;
     updateVars: string;
-    scenarios?: Cycles | Scenarios;
+    innerScenarios?: Cycles | Scenarios;
     onScenarioCrud: string;
     onChange?: string;
     onCreation?: string;
@@ -85,6 +85,7 @@ interface ScenarioSelectorProps {
     dynamicClassName?: string;
     showPins?: boolean;
     showDialog?: boolean;
+    multiple?: boolean;
 }
 
 interface ScenarioEditDialogProps {
@@ -278,7 +279,9 @@ const ScenarioEditDialog = ({ scenario, submit, open, actionEdit, configs, close
                                         <DatePicker
                                             label="Date"
                                             value={new Date(form.values.date)}
-                                            onChange={(date?:Date|null) => form.setFieldValue("date", date?.toISOString())}
+                                            onChange={(date?: Date | null) =>
+                                                form.setFieldValue("date", date?.toISOString())
+                                            }
                                             disabled={actionEdit}
                                         />
                                     </LocalizationProvider>
@@ -411,7 +414,7 @@ const ScenarioEditDialog = ({ scenario, submit, open, actionEdit, configs, close
 };
 
 const ScenarioSelector = (props: ScenarioSelectorProps) => {
-    const { showAddButton = true, propagate = true, showPins = false, showDialog = true } = props;
+    const { showAddButton = true, propagate = true, showPins = false, showDialog = true, multiple = false } = props;
     const [open, setOpen] = useState(false);
     const [actionEdit, setActionEdit] = useState<boolean>(false);
 
@@ -422,10 +425,19 @@ const ScenarioSelector = (props: ScenarioSelectorProps) => {
 
     const onSubmit = useCallback(
         (...values: unknown[]) => {
-            dispatch(createSendActionNameAction(props.id, module, props.onScenarioCrud, props.onCreation, props.updateVarName, ...values));
+            dispatch(
+                createSendActionNameAction(
+                    props.id,
+                    module,
+                    props.onScenarioCrud,
+                    props.onCreation,
+                    props.updateVarName,
+                    ...values
+                )
+            );
             if (values.length > 1 && values[1]) {
                 // delete requested => unselect current node
-                const lovVar = getUpdateVar(props.updateVars, "scenarios");
+                const lovVar = getUpdateVar(props.updateVars, "innerScenarios");
                 dispatch(
                     createSendUpdateAction(props.updateVarName, undefined, module, props.onChange, propagate, lovVar)
                 );
@@ -487,11 +499,12 @@ const ScenarioSelector = (props: ScenarioSelectorProps) => {
             <Box sx={MainTreeBoxSx} id={props.id} className={className}>
                 <CoreSelector
                     {...props}
-                    entities={props.scenarios}
+                    entities={props.innerScenarios}
                     leafType={NodeType.SCENARIO}
-                    lovPropertyName="scenarios"
+                    lovPropertyName="innerScenarios"
                     editComponent={EditScenario}
                     showPins={showPins}
+                    multiple={multiple}
                 />
                 {showAddButton ? (
                     <Button variant="outlined" onClick={onDialogOpen} fullWidth endIcon={<Add />}>

+ 3 - 1
frontend/taipy/src/utils.ts

@@ -13,7 +13,7 @@
 import { Theme, alpha } from "@mui/material";
 import { PopoverOrigin } from "@mui/material/Popover";
 
-import { useDynamicProperty } from "taipy-gui";
+import { getUpdateVar, useDynamicProperty } from "taipy-gui";
 
 export type ScenarioFull = [
     string,     // id
@@ -218,3 +218,5 @@ export const DeleteIconSx = { height: 50, width: 50, p: 0 };
 
 
 export const EmptyArray = [];
+
+export const getUpdateVarNames = (updateVars: string, ...vars: string[]) => vars.map((v) => getUpdateVar(updateVars, v) || "").filter(v => v);

+ 7 - 2
taipy/_cli/_scaffold_cli.py

@@ -12,6 +12,7 @@
 import pathlib
 import sys
 
+from cookiecutter.exceptions import OutputDirExistsException
 from cookiecutter.main import cookiecutter
 
 import taipy
@@ -45,6 +46,10 @@ class _ScaffoldCLI(_AbstractCLI):
         args = cls._parse_arguments()
         if not args:
             return
-
-        cookiecutter(cls._TEMPLATE_MAP[args.template])
+        try:
+            cookiecutter(cls._TEMPLATE_MAP[args.template])
+        except OutputDirExistsException as err:
+            error_msg = f"{str(err)}. Please remove the existing directory or provide a new folder name."
+            print(error_msg)  # noqa: T201
+            sys.exit(1)
         sys.exit(0)

+ 6 - 4
taipy/core/_entity/_properties.py

@@ -11,6 +11,8 @@
 
 from collections import UserDict
 
+from taipy.config.common._template_handler import _TemplateHandler as _tpl
+
 from ..notification import EventOperation, Notifier, _make_event
 
 
@@ -25,9 +27,10 @@ class _Properties(UserDict):
 
     def __setitem__(self, key, value):
         super(_Properties, self).__setitem__(key, value)
-        from ... import core as tp
 
         if hasattr(self, "_entity_owner"):
+            from ... import core as tp
+
             event = _make_event(
                 self._entity_owner,
                 EventOperation.UPDATE,
@@ -44,15 +47,14 @@ class _Properties(UserDict):
                 self._entity_owner._in_context_attributes_changed_collector.append(event)
 
     def __getitem__(self, key):
-        from taipy.config.common._template_handler import _TemplateHandler as _tpl
-
         return _tpl._replace_templates(super(_Properties, self).__getitem__(key))
 
     def __delitem__(self, key):
         super(_Properties, self).__delitem__(key)
-        from ... import core as tp
 
         if hasattr(self, "_entity_owner"):
+            from ... import core as tp
+
             event = _make_event(
                 self._entity_owner,
                 EventOperation.UPDATE,

+ 7 - 0
taipy/core/_manager/_manager.py

@@ -157,6 +157,13 @@ class _Manager(Generic[EntityType]):
     def _export(cls, id: str, folder_path: Union[str, pathlib.Path], **kwargs):
         return cls._repository._export(id, folder_path)
 
+    @classmethod
+    def _import(cls, entity_file: pathlib.Path, version: str, **kwargs) -> EntityType:
+        imported_entity = cls._repository._import(entity_file)
+        imported_entity._version = version
+        cls._set(imported_entity)
+        return imported_entity
+
     @classmethod
     def _is_editable(cls, entity: Union[EntityType, str]) -> bool:
         return True

+ 28 - 0
taipy/core/_repository/_abstract_repository.py

@@ -9,10 +9,14 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
+import json
 import pathlib
 from abc import abstractmethod
 from typing import Any, Dict, Generic, Iterable, List, Optional, TypeVar, Union
 
+from ..exceptions import FileCannotBeRead
+from ._decoder import _Decoder
+
 ModelType = TypeVar("ModelType")
 Entity = TypeVar("Entity")
 
@@ -122,3 +126,27 @@ class _AbstractRepository(Generic[ModelType, Entity]):
             folder_path (Union[str, pathlib.Path]): The folder path to export the entity to.
         """
         raise NotImplementedError
+
+    def _import(self, entity_file_path: pathlib.Path) -> Entity:
+        """
+        Import an entity from an exported file.
+
+        Parameters:
+            entity_file_path (pathlib.Path): The path of the exported file to import the entity from.
+
+        Returns:
+            The imported entity.
+        """
+        if not entity_file_path.is_file():
+            raise FileNotFoundError
+
+        try:
+            with entity_file_path.open("r", encoding="UTF-8") as f:
+                file_content = f.read()
+        except Exception:
+            raise FileCannotBeRead(str(entity_file_path)) from None
+
+        if isinstance(file_content, str):
+            file_content = json.loads(file_content, cls=_Decoder)
+        model = self.model_type.from_dict(file_content)  # type: ignore[attr-defined]
+        return self.converter._model_to_entity(model)  # type: ignore[attr-defined]

+ 15 - 0
taipy/core/_version/_version_manager.py

@@ -9,6 +9,7 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
+import pathlib
 import uuid
 from typing import List, Optional, Union
 
@@ -230,3 +231,17 @@ class _VersionManager(_Manager[_Version]):
     @classmethod
     def _delete_entities_of_multiple_types(cls, _entity_ids):
         raise NotImplementedError
+
+    @classmethod
+    def _import(cls, entity_file: pathlib.Path, version: str, **kwargs) -> _Version:
+        imported_version = cls._repository._import(entity_file)
+
+        comparator_result = Config._comparator._find_conflict_config(  # type: ignore[attr-defined]
+            imported_version.config,
+            Config._applied_config,  # type: ignore[attr-defined]
+            imported_version.id,
+        )
+        if comparator_result.get(_ComparatorResult.CONFLICTED_SECTION_KEY):
+            raise ConflictedConfigurationError()
+
+        return imported_version

+ 3 - 2
taipy/core/_version/_version_manager_factory.py

@@ -9,6 +9,8 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
+from typing import Type
+
 from .._manager._manager_factory import _ManagerFactory
 from ..common import _utils
 from ._version_fs_repository import _VersionFSRepository
@@ -17,11 +19,10 @@ from ._version_sql_repository import _VersionSQLRepository
 
 
 class _VersionManagerFactory(_ManagerFactory):
-
     __REPOSITORY_MAP = {"default": _VersionFSRepository, "sql": _VersionSQLRepository}
 
     @classmethod
-    def _build_manager(cls) -> _VersionManager:  # type: ignore
+    def _build_manager(cls) -> Type[_VersionManager]:
         if cls._using_enterprise():
             version_manager = _utils._load_fct(
                 cls._TAIPY_ENTERPRISE_CORE_MODULE + "._version._version_manager", "_VersionManager"

+ 97 - 84
taipy/core/config/config.schema.json

@@ -100,7 +100,7 @@
               "generic",
               "parquet",
               "s3_object",
-              "",
+              ""
             ],
             "default": "pickle"
           },
@@ -237,47 +237,33 @@
             "description": "storage_type: parquet specific.Additional parameters when writing parquet files, default is an empty dictionary",
             "type": "object"
           },
-          "aws_access_key":{
+          "aws_access_key": {
             "description": "storage_type: s3_object specific.Amazon Storage public key",
             "type": "string"
           },
-          "aws_secret_access_key":{
+          "aws_secret_access_key": {
             "description": "storage_type: s3_object specific.Amazon Storage secret key",
             "type": "string"
           },
-          "aws_s3_bucket_name":{
+          "aws_s3_bucket_name": {
             "description": "storage_type: s3_object specific.Name of Bucket",
             "type": "string"
           },
-          "aws_s3_object_key":{
+          "aws_s3_object_key": {
             "description": "storage_type: s3_object specific.File name",
             "type": "string"
           },
-          "aws_region":{
+          "aws_region": {
             "description": "storage_type: s3_object specific.Bucket Location",
             "type": "string"
           },
-          "aws_s3_object_parameters":{
+          "aws_s3_object_parameters": {
             "description": "storage_type: s3_object specific.Additional parameters when accessing s3 object, default is an empty dictionary",
             "type": "array"
-          },
-          "if": {
-            "properties": {
-              "storage_type": {
-                "enum": [
-                  "csv",
-                  "excel",
-                  "json"
-                ]
-              }
-            }
-          },
-          "then": {
-            "required": [
-              "default_path"
-            ]
-          },
-          "else": {
+          }
+        },
+        "allOf": [
+          {
             "if": {
               "properties": {
                 "storage_type": {
@@ -290,84 +276,111 @@
                 "read_fct",
                 "write_fct"
               ]
+            }
+          },
+          {
+            "if": {
+              "properties": {
+                "storage_type": {
+                  "const": "sql"
+                }
+              }
             },
-            "else": {
+            "then": {
               "if": {
                 "properties": {
-                  "storage_type": {
-                    "enum": [
-                      "sql",
-                      "sql_table",
-                      "mongo_collection"
-                    ]
+                  "db_engine": {
+                    "const": "sqlite"
                   }
                 }
               },
               "then": {
                 "required": [
-                  "db_name"
-                ],
-            "else": {
+                  "db_name",
+                  "db_engine",
+                  "read_query",
+                  "write_query_builder"
+                ]
+              },
+              "else": {
+                "required": [
+                  "db_username",
+                  "db_password",
+                  "db_name",
+                  "db_engine",
+                  "read_query",
+                  "write_query_builder"
+                ]
+              }
+            }
+          },
+          {
+            "if": {
+              "properties": {
+                "storage_type": {
+                  "const": "sql_table"
+                }
+              }
+            },
+            "then": {
               "if": {
                 "properties": {
-                  "storage_type": {
-                    "enum": [
-                      "s3_object",
-                    ]
+                  "db_engine": {
+                    "const": "sqlite"
                   }
                 }
               },
               "then": {
                 "required": [
-                  "aws_access_key",
-                  "aws_secret_access_key",
-                  "aws_s3_bucket_name",
-                  "aws_s3_object_key"
-                ],
-                "if": {
-                  "properties": {
-                    "storage_type": {
-                      "enum": [
-                        "sql",
-                        "sql_table"
-                      ]
-                    }
-                  }
-                },
-                "then": {
-                  "required": [
-                    "db_username",
-                    "db_password",
-                    "db_engine"
-                  ],
-                  "if": {
-                    "properties": {
-                      "storage_type": {
-                        "const": "sql"
-                      }
-                    }
-                  },
-                  "then": {
-                    "required": [
-                      "read_query",
-                      "write_query_builder"
-                    ]
-                  },
-                  "else": {
-                    "required": [
-                      "table_name"
-                    ]
-                  }
-                },
-                "else": {
-                  "required": [
-                    "collection_name"
-                  ]
+                  "db_name",
+                  "db_engine",
+                  "table_name"
+                ]
+              },
+              "else": {
+                "required": [
+                  "db_username",
+                  "db_password",
+                  "db_name",
+                  "db_engine",
+                  "table_name"
+                ]
+              }
+            }
+          },
+          {
+            "if": {
+              "properties": {
+                "storage_type": {
+                  "const": "mongo_collection"
+                }
+              }
+            },
+            "then": {
+              "required": [
+                "db_name",
+                "collection_name"
+              ]
+            }
+          },
+          {
+            "if": {
+              "properties": {
+                "storage_type": {
+                  "const": "s3_object"
                 }
               }
+            },
+            "then": {
+              "required": [
+                "aws_access_key",
+                "aws_secret_access_key",
+                "aws_s3_bucket_name",
+                "aws_s3_object_key"
+              ]
             }
           }
-        }
+        ]
       }
     },
     "TASK": {

+ 1 - 2
taipy/core/cycle/_cycle_manager_factory.py

@@ -19,11 +19,10 @@ from ._cycle_sql_repository import _CycleSQLRepository
 
 
 class _CycleManagerFactory(_ManagerFactory):
-
     __REPOSITORY_MAP = {"default": _CycleFSRepository, "sql": _CycleSQLRepository}
 
     @classmethod
-    def _build_manager(cls) -> Type[_CycleManager]:  # type: ignore
+    def _build_manager(cls) -> Type[_CycleManager]:
         if cls._using_enterprise():
             cycle_manager = _load_fct(
                 cls._TAIPY_ENTERPRISE_CORE_MODULE + ".cycle._cycle_manager", "_CycleManager"

+ 1 - 1
taipy/core/cycle/cycle.py

@@ -157,7 +157,7 @@ class Cycle(_Entity, _Labeled):
         raise AttributeError(f"{attribute_name} is not an attribute of cycle {self.id}")
 
     def __eq__(self, other):
-        return self.id == other.id
+        return isinstance(other, Cycle) and self.id == other.id
 
     def __hash__(self):
         return hash(self.id)

+ 0 - 35
taipy/core/data/_abstract_file.py

@@ -1,35 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-import os
-import pathlib
-import shutil
-
-
-class _FileDataNodeMixin(object):
-    """Mixin class designed to handle file-based data nodes
-    (CSVDataNode, ParquetDataNode, ExcelDataNode, PickleDataNode, JSONDataNode, etc.)."""
-
-    __EXTENSION_MAP = {"csv": "csv", "excel": "xlsx", "parquet": "parquet", "pickle": "p", "json": "json"}
-
-    def _build_path(self, storage_type):
-        from taipy.config.config import Config
-
-        folder = f"{storage_type}s"
-        dir_path = pathlib.Path(Config.core.storage_folder) / folder
-        if not dir_path.exists():
-            dir_path.mkdir(parents=True, exist_ok=True)
-        return dir_path / f"{self.id}.{self.__EXTENSION_MAP.get(storage_type)}"
-
-    def _migrate_path(self, storage_type, old_path):
-        new_path = self._build_path(storage_type)
-        if os.path.exists(old_path):
-            shutil.move(old_path, new_path)
-        return new_path

+ 1 - 1
taipy/core/data/_abstract_sql.py

@@ -25,7 +25,7 @@ from taipy.config.common.scope import Scope
 from .._version._version_manager_factory import _VersionManagerFactory
 from ..data.operator import JoinOperator, Operator
 from ..exceptions.exceptions import MissingRequiredProperty, UnknownDatabaseEngine
-from ._abstract_tabular import _TabularDataNodeMixin
+from ._tabular_datanode_mixin import _TabularDataNodeMixin
 from .data_node import DataNode
 from .data_node_id import DataNodeId, Edit
 

+ 29 - 12
taipy/core/data/_data_manager.py

@@ -26,11 +26,10 @@ from ..exceptions.exceptions import InvalidDataNodeType
 from ..notification import Event, EventEntityType, EventOperation, Notifier, _make_event
 from ..scenario.scenario_id import ScenarioId
 from ..sequence.sequence_id import SequenceId
-from ._abstract_file import _FileDataNodeMixin
 from ._data_fs_repository import _DataFSRepository
+from ._file_datanode_mixin import _FileDataNodeMixin
 from .data_node import DataNode
 from .data_node_id import DataNodeId
-from .pickle import PickleDataNode
 
 
 class _DataManager(_Manager[DataNode], _VersionMixin):
@@ -112,21 +111,21 @@ class _DataManager(_Manager[DataNode], _VersionMixin):
         return cls._repository._load_all(filters)
 
     @classmethod
-    def _clean_pickle_file(cls, data_node: DataNode):
-        if not isinstance(data_node, PickleDataNode):
+    def _clean_generated_file(cls, data_node: DataNode):
+        if not isinstance(data_node, _FileDataNodeMixin):
             return
         if data_node.is_generated and os.path.exists(data_node.path):
             os.remove(data_node.path)
 
     @classmethod
-    def _clean_pickle_files(cls, data_nodes: Iterable[DataNode]):
+    def _clean_generated_files(cls, data_nodes: Iterable[DataNode]):
         for data_node in data_nodes:
-            cls._clean_pickle_file(data_node)
+            cls._clean_generated_file(data_node)
 
     @classmethod
     def _delete(cls, data_node_id: DataNodeId):
         if data_node := cls._get(data_node_id, None):
-            cls._clean_pickle_file(data_node)
+            cls._clean_generated_file(data_node)
         super()._delete(data_node_id)
 
     @classmethod
@@ -135,19 +134,19 @@ class _DataManager(_Manager[DataNode], _VersionMixin):
         for data_node_id in data_node_ids:
             if data_node := cls._get(data_node_id):
                 data_nodes.append(data_node)
-        cls._clean_pickle_files(data_nodes)
+        cls._clean_generated_files(data_nodes)
         super()._delete_many(data_node_ids)
 
     @classmethod
     def _delete_all(cls):
         data_nodes = cls._get_all()
-        cls._clean_pickle_files(data_nodes)
+        cls._clean_generated_files(data_nodes)
         super()._delete_all()
 
     @classmethod
     def _delete_by_version(cls, version_number: str):
         data_nodes = cls._get_all(version_number)
-        cls._clean_pickle_files(data_nodes)
+        cls._clean_generated_files(data_nodes)
         cls._repository._delete_by(attribute="version", value=version_number)
         Notifier.publish(
             Event(EventEntityType.DATA_NODE, EventOperation.DELETION, metadata={"delete_by_version": version_number})
@@ -182,10 +181,28 @@ class _DataManager(_Manager[DataNode], _VersionMixin):
         else:
             folder = folder_path
 
-        data_export_dir = folder / Config.core.storage_folder
+        data_export_dir = folder / Config.core.storage_folder / os.path.dirname(data_node.path)
         if not data_export_dir.exists():
             data_export_dir.mkdir(parents=True)
 
         data_export_path = data_export_dir / os.path.basename(data_node.path)
         if os.path.exists(data_node.path):
-            shutil.copy(data_node.path, data_export_path)
+            shutil.copy2(data_node.path, data_export_path)
+
+    @classmethod
+    def _import(cls, entity_file: pathlib.Path, version: str, **kwargs) -> DataNode:
+        imported_data_node = cls._repository._import(entity_file)
+        imported_data_node._version = version
+        cls._set(imported_data_node)
+
+        if not (isinstance(imported_data_node, _FileDataNodeMixin) and isinstance(imported_data_node, DataNode)):
+            return imported_data_node
+
+        data_folder: pathlib.Path = pathlib.Path(str(kwargs.get("data_folder")))
+        if not data_folder.exists():
+            return imported_data_node
+
+        if (data_folder / imported_data_node.path).exists():
+            shutil.copy2(data_folder / imported_data_node.path, imported_data_node.path)
+
+        return imported_data_node

+ 1 - 2
taipy/core/data/_data_manager_factory.py

@@ -19,11 +19,10 @@ from ._data_sql_repository import _DataSQLRepository
 
 
 class _DataManagerFactory(_ManagerFactory):
-
     __REPOSITORY_MAP = {"default": _DataFSRepository, "sql": _DataSQLRepository}
 
     @classmethod
-    def _build_manager(cls) -> Type[_DataManager]:  # type: ignore
+    def _build_manager(cls) -> Type[_DataManager]:
         if cls._using_enterprise():
             data_manager = _load_fct(
                 cls._TAIPY_ENTERPRISE_CORE_MODULE + ".data._data_manager", "_DataManager"

+ 94 - 0
taipy/core/data/_file_datanode_mixin.py

@@ -0,0 +1,94 @@
+# Copyright 2021-2024 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+import pathlib
+import shutil
+from datetime import datetime
+from os.path import isfile
+from typing import Any, Dict, Optional
+
+from taipy.config.config import Config
+
+from .._entity._reload import _self_reload
+from .data_node import DataNode
+from .data_node_id import Edit
+
+
+class _FileDataNodeMixin(object):
+    """Mixin class designed to handle file-based data nodes
+    (CSVDataNode, ParquetDataNode, ExcelDataNode, PickleDataNode, JSONDataNode, etc.)."""
+
+    __EXTENSION_MAP = {"csv": "csv", "excel": "xlsx", "parquet": "parquet", "pickle": "p", "json": "json"}
+
+    _DEFAULT_DATA_KEY = "default_data"
+    _PATH_KEY = "path"
+    _DEFAULT_PATH_KEY = "default_path"
+    _IS_GENERATED_KEY = "is_generated"
+
+    def __init__(self, properties: Dict) -> None:
+        self._path: str = properties.get(self._PATH_KEY, properties.get(self._DEFAULT_PATH_KEY))
+        self._is_generated: bool = properties.get(self._IS_GENERATED_KEY, self._path is None)
+        self._last_edit_date: Optional[datetime] = None
+
+        if self._path and ".data" in self._path:
+            self._path = self._migrate_path(self.storage_type(), self._path)  # type: ignore[attr-defined]
+        if not self._path:
+            self._path = self._build_path(self.storage_type())  # type: ignore[attr-defined]
+
+        properties[self._IS_GENERATED_KEY] = self._is_generated
+        properties[self._PATH_KEY] = self._path
+
+    def _write_default_data(self, default_value: Any):
+        if default_value is not None and not os.path.exists(self._path):
+            self._write(default_value)  # type: ignore[attr-defined]
+            self._last_edit_date = DataNode._get_last_modified_datetime(self._path) or datetime.now()
+            self._edits.append(  # type: ignore[attr-defined]
+                Edit(
+                    {
+                        "timestamp": self._last_edit_date,
+                        "writer_identifier": "TAIPY",
+                        "comments": "Default data written.",
+                    }
+                )
+            )
+
+        if not self._last_edit_date and isfile(self._path):
+            self._last_edit_date = datetime.now()
+
+    @property  # type: ignore
+    @_self_reload(DataNode._MANAGER_NAME)
+    def is_generated(self) -> bool:
+        return self._is_generated
+
+    @property  # type: ignore
+    @_self_reload(DataNode._MANAGER_NAME)
+    def path(self) -> Any:
+        return self._path
+
+    @path.setter
+    def path(self, value):
+        self._path = value
+        self.properties[self._PATH_KEY] = value
+        self.properties[self._IS_GENERATED_KEY] = False
+
+    def _build_path(self, storage_type) -> str:
+        folder = f"{storage_type}s"
+        dir_path = pathlib.Path(Config.core.storage_folder) / folder
+        if not dir_path.exists():
+            dir_path.mkdir(parents=True, exist_ok=True)
+        return str(dir_path / f"{self.id}.{self.__EXTENSION_MAP.get(storage_type)}")  # type: ignore[attr-defined]
+
+    def _migrate_path(self, storage_type, old_path) -> str:
+        new_path = self._build_path(storage_type)
+        if os.path.exists(old_path):
+            shutil.move(old_path, new_path)
+        return new_path

+ 0 - 0
taipy/core/data/_abstract_tabular.py → taipy/core/data/_tabular_datanode_mixin.py


+ 16 - 48
taipy/core/data/csv.py

@@ -10,9 +10,7 @@
 # specific language governing permissions and limitations under the License.
 
 import csv
-import os
 from datetime import datetime, timedelta
-from os.path import isfile
 from typing import Any, Dict, List, Optional, Set
 
 import numpy as np
@@ -20,11 +18,10 @@ import pandas as pd
 
 from taipy.config.common.scope import Scope
 
-from .._entity._reload import _self_reload
 from .._version._version_manager_factory import _VersionManagerFactory
 from ..job.job_id import JobId
-from ._abstract_file import _FileDataNodeMixin
-from ._abstract_tabular import _TabularDataNodeMixin
+from ._file_datanode_mixin import _FileDataNodeMixin
+from ._tabular_datanode_mixin import _TabularDataNodeMixin
 from .data_node import DataNode
 from .data_node_id import DataNodeId, Edit
 
@@ -64,10 +61,8 @@ class CSVDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
     """
 
     __STORAGE_TYPE = "csv"
-    __PATH_KEY = "path"
-    __DEFAULT_PATH_KEY = "default_path"
     __ENCODING_KEY = "encoding"
-    __DEFAULT_DATA_KEY = "default_data"
+
     _REQUIRED_PROPERTIES: List[str] = []
 
     def __init__(
@@ -86,11 +81,11 @@ class CSVDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
         editor_expiration_date: Optional[datetime] = None,
         properties: Optional[Dict] = None,
     ):
+        self.id = id or self._new_id(config_id)
+
         if properties is None:
             properties = {}
 
-        default_value = properties.pop(self.__DEFAULT_DATA_KEY, None)
-
         if self.__ENCODING_KEY not in properties.keys():
             properties[self.__ENCODING_KEY] = "utf-8"
 
@@ -100,11 +95,15 @@ class CSVDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
         properties[self._EXPOSED_TYPE_PROPERTY] = _TabularDataNodeMixin._get_valid_exposed_type(properties)
         self._check_exposed_type(properties[self._EXPOSED_TYPE_PROPERTY])
 
+        default_value = properties.pop(self._DEFAULT_DATA_KEY, None)
+        _FileDataNodeMixin.__init__(self, properties)
+        _TabularDataNodeMixin.__init__(self, **properties)
+
         DataNode.__init__(
             self,
             config_id,
             scope,
-            id,
+            self.id,
             owner_id,
             parent_ids,
             last_edit_date,
@@ -116,39 +115,18 @@ class CSVDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
             editor_expiration_date,
             **properties,
         )
-        _TabularDataNodeMixin.__init__(self, **properties)
 
-        self._path = properties.get(self.__PATH_KEY, properties.get(self.__DEFAULT_PATH_KEY))
-        if self._path and ".data" in self._path:
-            self._path = self._migrate_path(self.storage_type(), self._path)
-
-        if not self._path:
-            self._path = self._build_path(self.storage_type())
-        properties[self.__PATH_KEY] = self._path
-
-        if default_value is not None and not os.path.exists(self._path):
-            self._write(default_value)
-            self._last_edit_date = datetime.now()
-            self._edits.append(
-                Edit(
-                    {
-                        "timestamp": self._last_edit_date,
-                        "writer_identifier": "TAIPY",
-                        "comments": "Default data written.",
-                    }
-                )
-            )
-        if not self._last_edit_date and isfile(self._path):
-            self._last_edit_date = datetime.now()
+        self._write_default_data(default_value)
 
         self._TAIPY_PROPERTIES.update(
             {
+                self._PATH_KEY,
+                self._DEFAULT_PATH_KEY,
+                self._DEFAULT_DATA_KEY,
+                self._IS_GENERATED_KEY,
+                self._HAS_HEADER_PROPERTY,
                 self._EXPOSED_TYPE_PROPERTY,
-                self.__PATH_KEY,
-                self.__DEFAULT_PATH_KEY,
                 self.__ENCODING_KEY,
-                self.__DEFAULT_DATA_KEY,
-                self._HAS_HEADER_PROPERTY,
             }
         )
 
@@ -156,16 +134,6 @@ class CSVDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
     def storage_type(cls) -> str:
         return cls.__STORAGE_TYPE
 
-    @property  # type: ignore
-    @_self_reload(DataNode._MANAGER_NAME)
-    def path(self):
-        return self._path
-
-    @path.setter
-    def path(self, value):
-        self._path = value
-        self.properties[self.__PATH_KEY] = value
-
     def _read(self):
         if self.properties[self._EXPOSED_TYPE_PROPERTY] == self._EXPOSED_TYPE_PANDAS:
             return self._read_as_pandas_dataframe()

+ 16 - 7
taipy/core/data/data_node.py

@@ -83,7 +83,7 @@ class DataNode(_Entity, _Labeled):
     __logger = _TaipyLogger._get_logger()
     _REQUIRED_PROPERTIES: List[str] = []
     _MANAGER_NAME: str = "data"
-    __PATH_KEY = "path"
+    _PATH_KEY = "path"
     __EDIT_TIMEOUT = 30
 
     _TAIPY_PROPERTIES: Set[str] = set()
@@ -105,7 +105,7 @@ class DataNode(_Entity, _Labeled):
         **kwargs,
     ) -> None:
         self._config_id = _validate_id(config_id)
-        self.id = id or DataNodeId(self.__ID_SEPARATOR.join([self._ID_PREFIX, self.config_id, str(uuid.uuid4())]))
+        self.id = id or self._new_id(self._config_id)
         self._owner_id = owner_id
         self._parent_ids = parent_ids or set()
         self._scope = scope
@@ -121,6 +121,13 @@ class DataNode(_Entity, _Labeled):
 
         self._properties = _Properties(self, **kwargs)
 
+    @staticmethod
+    def _new_id(config_id: str) -> DataNodeId:
+        """Generate a unique datanode identifier."""
+        return DataNodeId(
+            DataNode.__ID_SEPARATOR.join([DataNode._ID_PREFIX, _validate_id(config_id), str(uuid.uuid4())])
+        )
+
     @property
     def config_id(self):
         return self._config_id
@@ -158,7 +165,7 @@ class DataNode(_Entity, _Labeled):
     @property  # type: ignore
     @_self_reload(_MANAGER_NAME)
     def last_edit_date(self):
-        last_modified_datetime = self.__get_last_modified_datetime()
+        last_modified_datetime = self._get_last_modified_datetime(self._properties.get(self._PATH_KEY, None))
         if last_modified_datetime and last_modified_datetime > self._last_edit_date:
             return last_modified_datetime
         else:
@@ -270,7 +277,7 @@ class DataNode(_Entity, _Labeled):
         return {key: value for key, value in self.properties.items() if key not in self._TAIPY_PROPERTIES}
 
     def __eq__(self, other):
-        return self.id == other.id
+        return isinstance(other, DataNode) and self.id == other.id
 
     def __ne__(self, other):
         return not self == other
@@ -290,8 +297,8 @@ class DataNode(_Entity, _Labeled):
             return self._properties[protected_attribute_name]
         raise AttributeError(f"{attribute_name} is not an attribute of data node {self.id}")
 
-    def __get_last_modified_datetime(self) -> Optional[datetime]:
-        path = self._properties.get(self.__PATH_KEY, None)
+    @classmethod
+    def _get_last_modified_datetime(cls, path: Optional[str] = None) -> Optional[datetime]:
         if path and os.path.isfile(path):
             return datetime.fromtimestamp(os.path.getmtime(path))
 
@@ -380,7 +387,9 @@ class DataNode(_Entity, _Labeled):
         """
         edit = {k: v for k, v in options.items() if v is not None}
         if "timestamp" not in edit:
-            edit["timestamp"] = datetime.now()
+            edit["timestamp"] = (
+                self._get_last_modified_datetime(self._properties.get(self._PATH_KEY, None)) or datetime.now()
+            )
         self.last_edit_date = edit.get("timestamp")
         self._edits.append(edit)
 

+ 16 - 49
taipy/core/data/excel.py

@@ -9,9 +9,7 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
-import os
 from datetime import datetime, timedelta
-from os.path import isfile
 from typing import Any, Dict, List, Optional, Set, Tuple, Union
 
 import numpy as np
@@ -20,12 +18,11 @@ from openpyxl import load_workbook
 
 from taipy.config.common.scope import Scope
 
-from .._entity._reload import _self_reload
 from .._version._version_manager_factory import _VersionManagerFactory
 from ..exceptions.exceptions import ExposedTypeLengthMismatch, NonExistingExcelSheet, SheetNameLengthMismatch
 from ..job.job_id import JobId
-from ._abstract_file import _FileDataNodeMixin
-from ._abstract_tabular import _TabularDataNodeMixin
+from ._file_datanode_mixin import _FileDataNodeMixin
+from ._tabular_datanode_mixin import _TabularDataNodeMixin
 from .data_node import DataNode
 from .data_node_id import DataNodeId, Edit
 
@@ -68,10 +65,8 @@ class ExcelDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
     """
 
     __STORAGE_TYPE = "excel"
-    __PATH_KEY = "path"
-    __DEFAULT_DATA_KEY = "default_data"
-    __DEFAULT_PATH_KEY = "default_path"
     __SHEET_NAME_PROPERTY = "sheet_name"
+
     _REQUIRED_PROPERTIES: List[str] = []
 
     def __init__(
@@ -90,13 +85,11 @@ class ExcelDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
         editor_expiration_date: Optional[datetime] = None,
         properties: Dict = None,
     ):
+        self.id = id or self._new_id(config_id)
+
         if properties is None:
             properties = {}
 
-        default_value = properties.pop(self.__DEFAULT_DATA_KEY, None)
-        self._path = properties.get(self.__PATH_KEY, properties.get(self.__DEFAULT_PATH_KEY))
-        properties[self.__PATH_KEY] = self._path
-
         if self.__SHEET_NAME_PROPERTY not in properties.keys():
             properties[self.__SHEET_NAME_PROPERTY] = None
         if self._HAS_HEADER_PROPERTY not in properties.keys():
@@ -104,11 +97,15 @@ class ExcelDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
         properties[self._EXPOSED_TYPE_PROPERTY] = _TabularDataNodeMixin._get_valid_exposed_type(properties)
         self._check_exposed_type(properties[self._EXPOSED_TYPE_PROPERTY])
 
+        default_value = properties.pop(self._DEFAULT_DATA_KEY, None)
+        _FileDataNodeMixin.__init__(self, properties)
+        _TabularDataNodeMixin.__init__(self, **properties)
+
         DataNode.__init__(
             self,
             config_id,
             scope,
-            id,
+            self.id,
             owner_id,
             parent_ids,
             last_edit_date,
@@ -120,51 +117,21 @@ class ExcelDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
             editor_expiration_date,
             **properties,
         )
-        _TabularDataNodeMixin.__init__(self, **properties)
-        if self._path and ".data" in self._path:
-            self._path = self._migrate_path(self.storage_type(), self._path)
-
-        if not self._path:
-            self._path = self._build_path(self.storage_type())
-            properties[self.__PATH_KEY] = self._path
-
-        if default_value is not None and not os.path.exists(self._path):
-            self._write(default_value)
-            self._last_edit_date = datetime.now()
-            self._edits.append(
-                Edit(
-                    {
-                        "timestamp": self._last_edit_date,
-                        "writer_identifier": "TAIPY",
-                        "comments": "Default data written.",
-                    }
-                )
-            )
 
-        if not self._last_edit_date and isfile(self._path):
-            self._last_edit_date = datetime.now()
+        self._write_default_data(default_value)
 
         self._TAIPY_PROPERTIES.update(
             {
-                self._EXPOSED_TYPE_PROPERTY,
-                self.__PATH_KEY,
-                self.__DEFAULT_PATH_KEY,
-                self.__DEFAULT_DATA_KEY,
+                self._PATH_KEY,
+                self._DEFAULT_PATH_KEY,
+                self._DEFAULT_DATA_KEY,
+                self._IS_GENERATED_KEY,
                 self._HAS_HEADER_PROPERTY,
+                self._EXPOSED_TYPE_PROPERTY,
                 self.__SHEET_NAME_PROPERTY,
             }
         )
 
-    @property  # type: ignore
-    @_self_reload(DataNode._MANAGER_NAME)
-    def path(self):
-        return self._path
-
-    @path.setter
-    def path(self, value):
-        self._path = value
-        self.properties[self.__PATH_KEY] = value
-
     @classmethod
     def storage_type(cls) -> str:
         return cls.__STORAGE_TYPE

+ 14 - 45
taipy/core/data/json.py

@@ -11,10 +11,8 @@
 
 import dataclasses
 import json
-import os
 from datetime import date, datetime, timedelta
 from enum import Enum
-from os.path import isfile
 from pydoc import locate
 from typing import Any, Dict, List, Optional, Set
 
@@ -22,7 +20,7 @@ from taipy.config.common.scope import Scope
 
 from .._entity._reload import _self_reload
 from .._version._version_manager_factory import _VersionManagerFactory
-from ._abstract_file import _FileDataNodeMixin
+from ._file_datanode_mixin import _FileDataNodeMixin
 from .data_node import DataNode
 from .data_node_id import DataNodeId, Edit
 
@@ -62,9 +60,6 @@ class JSONDataNode(DataNode, _FileDataNodeMixin):
     """
 
     __STORAGE_TYPE = "json"
-    __DEFAULT_DATA_KEY = "default_data"
-    __DEFAULT_PATH_KEY = "default_path"
-    __PATH_KEY = "path"
     __ENCODING_KEY = "encoding"
     _ENCODER_KEY = "encoder"
     _DECODER_KEY = "decoder"
@@ -86,18 +81,22 @@ class JSONDataNode(DataNode, _FileDataNodeMixin):
         editor_expiration_date: Optional[datetime] = None,
         properties: Optional[Dict] = None,
     ):
+        self.id = id or self._new_id(config_id)
+
         if properties is None:
             properties = {}
 
-        default_value = properties.pop(self.__DEFAULT_DATA_KEY, None)
-
         if self.__ENCODING_KEY not in properties.keys():
             properties[self.__ENCODING_KEY] = "utf-8"
 
-        super().__init__(
+        default_value = properties.pop(self._DEFAULT_DATA_KEY, None)
+        _FileDataNodeMixin.__init__(self, properties)
+
+        DataNode.__init__(
+            self,
             config_id,
             scope,
-            id,
+            self.id,
             owner_id,
             parent_ids,
             last_edit_date,
@@ -109,39 +108,19 @@ class JSONDataNode(DataNode, _FileDataNodeMixin):
             editor_expiration_date,
             **properties,
         )
-        self._path = properties.get(self.__PATH_KEY, properties.get(self.__DEFAULT_PATH_KEY))
-        if self._path and ".data" in self._path:
-            self._path = self._migrate_path(self.storage_type(), self._path)
-
-        if not self._path:
-            self._path = self._build_path(self.storage_type())
-        properties[self.__PATH_KEY] = self._path
 
         self._decoder = self._properties.get(self._DECODER_KEY, _DefaultJSONDecoder)
         self._encoder = self._properties.get(self._ENCODER_KEY, _DefaultJSONEncoder)
 
-        if default_value is not None and not os.path.exists(self._path):
-            self._write(default_value)
-            self._last_edit_date = datetime.now()
-            self._edits.append(
-                Edit(
-                    {
-                        "timestamp": self._last_edit_date,
-                        "writer_identifier": "TAIPY",
-                        "comments": "Default data written.",
-                    }
-                )
-            )
-
-        if not self._last_edit_date and isfile(self._path):  # type: ignore
-            self._last_edit_date = datetime.now()
+        self._write_default_data(default_value)
 
         self._TAIPY_PROPERTIES.update(
             {
-                self.__PATH_KEY,
-                self.__DEFAULT_PATH_KEY,
+                self._PATH_KEY,
+                self._DEFAULT_PATH_KEY,
+                self._DEFAULT_DATA_KEY,
+                self._IS_GENERATED_KEY,
                 self.__ENCODING_KEY,
-                self.__DEFAULT_DATA_KEY,
                 self._ENCODER_KEY,
                 self._DECODER_KEY,
             }
@@ -151,16 +130,6 @@ class JSONDataNode(DataNode, _FileDataNodeMixin):
     def storage_type(cls) -> str:
         return cls.__STORAGE_TYPE
 
-    @property  # type: ignore
-    @_self_reload(DataNode._MANAGER_NAME)
-    def path(self):
-        return self._path
-
-    @path.setter
-    def path(self, value):
-        self._path = value
-        self.properties[self.__PATH_KEY] = value
-
     @property  # type: ignore
     @_self_reload(DataNode._MANAGER_NAME)
     def encoder(self):

+ 14 - 46
taipy/core/data/parquet.py

@@ -9,7 +9,6 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
-import os
 from datetime import datetime, timedelta
 from os.path import isdir, isfile
 from typing import Any, Dict, List, Optional, Set
@@ -19,12 +18,11 @@ import pandas as pd
 
 from taipy.config.common.scope import Scope
 
-from .._entity._reload import _self_reload
 from .._version._version_manager_factory import _VersionManagerFactory
 from ..exceptions.exceptions import UnknownCompressionAlgorithm, UnknownParquetEngine
 from ..job.job_id import JobId
-from ._abstract_file import _FileDataNodeMixin
-from ._abstract_tabular import _TabularDataNodeMixin
+from ._file_datanode_mixin import _FileDataNodeMixin
+from ._tabular_datanode_mixin import _TabularDataNodeMixin
 from .data_node import DataNode
 from .data_node_id import DataNodeId, Edit
 
@@ -75,9 +73,6 @@ class ParquetDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
     """
 
     __STORAGE_TYPE = "parquet"
-    __PATH_KEY = "path"
-    __DEFAULT_DATA_KEY = "default_data"
-    __DEFAULT_PATH_KEY = "default_path"
     __ENGINE_PROPERTY = "engine"
     __VALID_PARQUET_ENGINES = ["pyarrow", "fastparquet"]
     __COMPRESSION_PROPERTY = "compression"
@@ -102,11 +97,11 @@ class ParquetDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
         editor_expiration_date: Optional[datetime] = None,
         properties: Optional[Dict] = None,
     ):
+        self.id = id or self._new_id(config_id)
+
         if properties is None:
             properties = {}
 
-        default_value = properties.pop(self.__DEFAULT_DATA_KEY, None)
-
         if self.__ENGINE_PROPERTY not in properties.keys():
             properties[self.__ENGINE_PROPERTY] = "pyarrow"
         if properties[self.__ENGINE_PROPERTY] not in self.__VALID_PARQUET_ENGINES:
@@ -137,11 +132,15 @@ class ParquetDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
         properties[self._EXPOSED_TYPE_PROPERTY] = _TabularDataNodeMixin._get_valid_exposed_type(properties)
         self._check_exposed_type(properties[self._EXPOSED_TYPE_PROPERTY])
 
+        default_value = properties.pop(self._DEFAULT_DATA_KEY, None)
+        _FileDataNodeMixin.__init__(self, properties)
+        _TabularDataNodeMixin.__init__(self, **properties)
+
         DataNode.__init__(
             self,
             config_id,
             scope,
-            id,
+            self.id,
             owner_id,
             parent_ids,
             last_edit_date,
@@ -153,39 +152,18 @@ class ParquetDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
             editor_expiration_date,
             **properties,
         )
-        _TabularDataNodeMixin.__init__(self, **properties)
-
-        self._path = properties.get(self.__PATH_KEY, properties.get(self.__DEFAULT_PATH_KEY))
-
-        if self._path and ".data" in self._path:
-            self._path = self._migrate_path(self.storage_type(), self._path)
-        if not self._path:
-            self._path = self._build_path(self.storage_type())
 
-        properties[self.__PATH_KEY] = self._path
-
-        if default_value is not None and not os.path.exists(self._path):
-            self._write(default_value)
-            self._last_edit_date = datetime.now()
-            self._edits.append(
-                Edit(
-                    {
-                        "timestamp": self._last_edit_date,
-                        "writer_identifier": "TAIPY",
-                        "comments": "Default data written.",
-                    }
-                )
-            )
+        self._write_default_data(default_value)
 
         if not self._last_edit_date and (isfile(self._path) or isdir(self._path)):
             self._last_edit_date = datetime.now()
-
         self._TAIPY_PROPERTIES.update(
             {
                 self._EXPOSED_TYPE_PROPERTY,
-                self.__PATH_KEY,
-                self.__DEFAULT_PATH_KEY,
-                self.__DEFAULT_DATA_KEY,
+                self._PATH_KEY,
+                self._DEFAULT_PATH_KEY,
+                self._DEFAULT_DATA_KEY,
+                self._IS_GENERATED_KEY,
                 self.__ENGINE_PROPERTY,
                 self.__COMPRESSION_PROPERTY,
                 self.__READ_KWARGS_PROPERTY,
@@ -197,16 +175,6 @@ class ParquetDataNode(DataNode, _FileDataNodeMixin, _TabularDataNodeMixin):
     def storage_type(cls) -> str:
         return cls.__STORAGE_TYPE
 
-    @property  # type: ignore
-    @_self_reload(DataNode._MANAGER_NAME)
-    def path(self):
-        return self._path
-
-    @path.setter
-    def path(self, value):
-        self._path = value
-        self.properties[self.__PATH_KEY] = value
-
     def _read(self):
         return self.read_with_kwargs()
 

+ 18 - 56
taipy/core/data/pickle.py

@@ -9,16 +9,14 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
-import os
 import pickle
 from datetime import datetime, timedelta
-from typing import Any, List, Optional, Set
+from typing import List, Optional, Set
 
 from taipy.config.common.scope import Scope
 
-from .._entity._reload import _self_reload
 from .._version._version_manager_factory import _VersionManagerFactory
-from ._abstract_file import _FileDataNodeMixin
+from ._file_datanode_mixin import _FileDataNodeMixin
 from .data_node import DataNode
 from .data_node_id import DataNodeId, Edit
 
@@ -56,10 +54,7 @@ class PickleDataNode(DataNode, _FileDataNodeMixin):
     """
 
     __STORAGE_TYPE = "pickle"
-    __PATH_KEY = "path"
-    __DEFAULT_PATH_KEY = "default_path"
-    __DEFAULT_DATA_KEY = "default_data"
-    __IS_GENERATED_KEY = "is_generated"
+
     _REQUIRED_PROPERTIES: List[str] = []
 
     def __init__(
@@ -78,18 +73,19 @@ class PickleDataNode(DataNode, _FileDataNodeMixin):
         editor_expiration_date: Optional[datetime] = None,
         properties=None,
     ):
+        self.id = id or self._new_id(config_id)
+
         if properties is None:
             properties = {}
-        default_value = properties.pop(self.__DEFAULT_DATA_KEY, None)
-        self._path = properties.get(self.__PATH_KEY, properties.get(self.__DEFAULT_PATH_KEY))
-        if self._path is not None:
-            properties[self.__PATH_KEY] = self._path
-        self._is_generated = properties.get(self.__IS_GENERATED_KEY, self._path is None)
-        properties[self.__IS_GENERATED_KEY] = self._is_generated
-        super().__init__(
+
+        default_value = properties.pop(self._DEFAULT_DATA_KEY, None)
+        _FileDataNodeMixin.__init__(self, properties)
+
+        DataNode.__init__(
+            self,
             config_id,
             scope,
-            id,
+            self.id,
             owner_id,
             parent_ids,
             last_edit_date,
@@ -101,33 +97,15 @@ class PickleDataNode(DataNode, _FileDataNodeMixin):
             editor_expiration_date,
             **properties,
         )
-        if self._path and ".data" in self._path:
-            self._path = self._migrate_path(self.storage_type(), self._path)
-
-        if self._path is None:
-            self._path = self._build_path(self.storage_type())
-
-        if default_value is not None and not os.path.exists(self._path):
-            self._write(default_value)
-            self._last_edit_date = datetime.now()
-            self._edits.append(
-                Edit(
-                    {
-                        "timestamp": self._last_edit_date,
-                        "writer_identifier": "TAIPY",
-                        "comments": "Default data written.",
-                    }
-                )
-            )
-        if not self._last_edit_date and os.path.exists(self._path):
-            self._last_edit_date = datetime.now()
+
+        self._write_default_data(default_value)
 
         self._TAIPY_PROPERTIES.update(
             {
-                self.__PATH_KEY,
-                self.__DEFAULT_PATH_KEY,
-                self.__DEFAULT_DATA_KEY,
-                self.__IS_GENERATED_KEY,
+                self._PATH_KEY,
+                self._DEFAULT_PATH_KEY,
+                self._DEFAULT_DATA_KEY,
+                self._IS_GENERATED_KEY,
             }
         )
 
@@ -135,22 +113,6 @@ class PickleDataNode(DataNode, _FileDataNodeMixin):
     def storage_type(cls) -> str:
         return cls.__STORAGE_TYPE
 
-    @property  # type: ignore
-    @_self_reload(DataNode._MANAGER_NAME)
-    def path(self) -> Any:
-        return self._path
-
-    @path.setter
-    def path(self, value):
-        self._path = value
-        self.properties[self.__PATH_KEY] = value
-        self.properties[self.__IS_GENERATED_KEY] = False
-
-    @property  # type: ignore
-    @_self_reload(DataNode._MANAGER_NAME)
-    def is_generated(self) -> bool:
-        return self._is_generated
-
     def _read(self):
         with open(self._path, "rb") as pf:
             return pickle.load(pf)

+ 35 - 8
taipy/core/exceptions/exceptions.py

@@ -261,7 +261,7 @@ class NonExistingScenarioConfig(Exception):
         self.message = f"Scenario config: {scenario_config_id} does not exist."
 
 
-class InvalidSscenario(Exception):
+class InvalidScenario(Exception):
     """Raised if a Scenario is not a Directed Acyclic Graph."""
 
     def __init__(self, scenario_id: str):
@@ -339,10 +339,6 @@ class ModeNotAvailable(Exception):
     """Raised if the mode in JobConfig is not supported."""
 
 
-class InvalidExportPath(Exception):
-    """Raised if the export path is not valid."""
-
-
 class NonExistingVersion(Exception):
     """Raised if request a Version that is not known by the Version Manager."""
 
@@ -373,16 +369,47 @@ class FileCannotBeRead(Exception):
     """Raised when a file cannot be read."""
 
 
-class ExportFolderAlreadyExists(Exception):
+class ExportPathAlreadyExists(Exception):
     """Raised when the export folder already exists."""
 
-    def __init__(self, folder_path: str, scenario_id: str):
+    def __init__(self, export_path: str, scenario_id: str):
         self.message = (
-            f"Folder '{folder_path}' already exists and can not be used to export scenario '{scenario_id}'."
+            f"The path '{export_path}' already exists and can not be used to export scenario '{scenario_id}'."
             " Please use the 'override' parameter to override it."
         )
 
 
+class EntitiesToBeImportAlredyExist(Exception):
+    """Raised when entities in the scenario to be imported have already exists"""
+
+    def __init__(self, import_path):
+        self.message = f"The import archive file {import_path} contains entities that have already existed."
+
+
+class DataToBeImportAlredyExist(Exception):
+    """Raised when data files in the scenario to be imported have already exists"""
+
+    def __init__(self, import_path):
+        self.message = (
+            f"The import archive file {import_path} contains data files that have already existed."
+            " Please use the 'override' parameter to override those."
+        )
+
+
+class ImportArchiveDoesntContainAnyScenario(Exception):
+    """Raised when the import archive file doesn't contain any scenario"""
+
+    def __init__(self, import_path):
+        self.message = f"The import archive file {import_path} doesn't contain any scenario."
+
+
+class ImportScenarioDoesntHaveAVersion(Exception):
+    """Raised when the import scenario doesn't have a version"""
+
+    def __init__(self, import_path):
+        self.message = f"The import scenario in the import archive file {import_path} doesn't have a version."
+
+
 class SQLQueryCannotBeExecuted(Exception):
     """Raised when an SQL Query cannot be executed."""
 

+ 3 - 1
taipy/core/job/_job_manager.py

@@ -58,7 +58,9 @@ class _JobManager(_Manager[Job], _VersionMixin):
         return job
 
     @classmethod
-    def _delete(cls, job: Job, force=False):
+    def _delete(cls, job: Union[Job, JobId], force=False):
+        if isinstance(job, str):
+            job = cls._get(job)
         if cls._is_deletable(job) or force:
             super()._delete(job.id)
         else:

+ 1 - 2
taipy/core/job/_job_manager_factory.py

@@ -19,11 +19,10 @@ from ._job_sql_repository import _JobSQLRepository
 
 
 class _JobManagerFactory(_ManagerFactory):
-
     __REPOSITORY_MAP = {"default": _JobFSRepository, "sql": _JobSQLRepository}
 
     @classmethod
-    def _build_manager(cls) -> Type[_JobManager]:  # type: ignore
+    def _build_manager(cls) -> Type[_JobManager]:
         if cls._using_enterprise():
             job_manager = _load_fct(
                 cls._TAIPY_ENTERPRISE_CORE_MODULE + ".job._job_manager", "_JobManager"

+ 1 - 1
taipy/core/job/job.py

@@ -164,7 +164,7 @@ class Job(_Entity, _Labeled):
         return self.creation_date.timestamp() >= other.creation_date.timestamp()
 
     def __eq__(self, other):
-        return self.id == other.id
+        return isinstance(other, Job) and self.id == other.id
 
     @_run_callbacks
     def blocked(self):

+ 110 - 3
taipy/core/scenario/_scenario_manager.py

@@ -10,14 +10,18 @@
 # specific language governing permissions and limitations under the License.
 
 import datetime
+import pathlib
+import tempfile
+import zipfile
 from functools import partial
-from typing import Any, Callable, List, Optional, Union
+from typing import Any, Callable, Dict, List, Literal, Optional, Type, Union
 
 from taipy.config import Config
 
 from .._entity._entity_ids import _EntityIds
 from .._manager._manager import _Manager
 from .._repository._abstract_repository import _AbstractRepository
+from .._version._version_manager_factory import _VersionManagerFactory
 from .._version._version_mixin import _VersionMixin
 from ..common.warn_if_inputs_not_ready import _warn_if_inputs_not_ready
 from ..config.scenario_config import ScenarioConfig
@@ -28,9 +32,12 @@ from ..exceptions.exceptions import (
     DeletingPrimaryScenario,
     DifferentScenarioConfigs,
     DoesNotBelongToACycle,
+    EntitiesToBeImportAlredyExist,
+    ImportArchiveDoesntContainAnyScenario,
+    ImportScenarioDoesntHaveAVersion,
     InsufficientScenarioToCompare,
+    InvalidScenario,
     InvalidSequence,
-    InvalidSscenario,
     NonExistingComparator,
     NonExistingScenario,
     NonExistingScenarioConfig,
@@ -180,7 +187,7 @@ class _ScenarioManager(_Manager[Scenario], _VersionMixin):
         cls._set(scenario)
 
         if not scenario._is_consistent():
-            raise InvalidSscenario(scenario.id)
+            raise InvalidScenario(scenario.id)
 
         actual_sequences = scenario._get_sequences()
         for sequence_name in sequences.keys():
@@ -270,6 +277,24 @@ class _ScenarioManager(_Manager[Scenario], _VersionMixin):
     def _get_primary_scenarios(cls) -> List[Scenario]:
         return [scenario for scenario in cls._get_all() if scenario.is_primary]
 
+    @classmethod
+    def _sort_scenarios(
+        cls,
+        scenarios: List[Scenario],
+        descending: bool = False,
+        sort_key: Literal["name", "id", "config_id", "creation_date", "tags"] = "name",
+    ) -> List[Scenario]:
+        if sort_key in ["name", "config_id", "creation_date", "tags"]:
+            if sort_key == "tags":
+                scenarios.sort(key=lambda x: (tuple(sorted(x.tags)), x.id), reverse=descending)
+            else:
+                scenarios.sort(key=lambda x: (getattr(x, sort_key), x.id), reverse=descending)
+        elif sort_key == "id":
+            scenarios.sort(key=lambda x: x.id, reverse=descending)
+        else:
+            scenarios.sort(key=lambda x: (x.name, x.id), reverse=descending)
+        return scenarios
+
     @classmethod
     def _is_promotable_to_primary(cls, scenario: Union[Scenario, ScenarioId]) -> bool:
         if isinstance(scenario, str):
@@ -432,3 +457,85 @@ class _ScenarioManager(_Manager[Scenario], _VersionMixin):
         for fil in filters:
             fil.update({"config_id": config_id})
         return cls._repository._load_all(filters)
+
+    @classmethod
+    def _import_scenario_and_children_entities(
+        cls,
+        zip_file_path: pathlib.Path,
+        override: bool,
+        entity_managers: Dict[str, Type[_Manager]],
+    ) -> Optional[Scenario]:
+        with tempfile.TemporaryDirectory() as tmp_dir:
+            with zipfile.ZipFile(zip_file_path) as zip_file:
+                zip_file.extractall(tmp_dir)
+
+            tmp_dir_path = pathlib.Path(tmp_dir)
+
+            if not ((tmp_dir_path / "scenarios").exists() or (tmp_dir_path / "scenario").exists()):
+                raise ImportArchiveDoesntContainAnyScenario(zip_file_path)
+
+            if not (tmp_dir_path / "version").exists():
+                raise ImportScenarioDoesntHaveAVersion(zip_file_path)
+
+            # Import the version to check for compatibility
+            entity_managers["version"]._import(next((tmp_dir_path / "version").iterdir()), "")
+
+            valid_entity_folders = list(entity_managers.keys())
+            valid_data_folder = Config.core.storage_folder
+
+            imported_scenario = None
+            imported_entities: Dict[str, List] = {}
+
+            for entity_folder in tmp_dir_path.iterdir():
+                if not entity_folder.is_dir() or entity_folder.name not in valid_entity_folders + [valid_data_folder]:
+                    cls._logger.warning(f"{entity_folder} is not a valid Taipy folder and will not be imported.")
+                    continue
+
+            try:
+                for entity_type in valid_entity_folders:
+                    # Skip the version folder as it is already handled
+                    if entity_type == "version":
+                        continue
+
+                    entity_folder = tmp_dir_path / entity_type
+                    if not entity_folder.exists():
+                        continue
+
+                    manager = entity_managers[entity_type]
+                    imported_entities[entity_type] = []
+
+                    for entity_file in entity_folder.iterdir():
+                        # Check if the to-be-imported entity already exists
+                        entity_id = entity_file.stem
+                        if manager._exists(entity_id):
+                            if override:
+                                cls._logger.warning(f"{entity_id} already exists and will be overridden.")
+                            else:
+                                cls._logger.error(
+                                    f"{entity_id} already exists. Please use the 'override' parameter to override it."
+                                )
+                                raise EntitiesToBeImportAlredyExist(zip_file_path)
+
+                        # Import the entity
+                        imported_entity = manager._import(
+                            entity_file,
+                            version=_VersionManagerFactory._build_manager()._get_latest_version(),
+                            data_folder=tmp_dir_path / valid_data_folder,
+                        )
+
+                        imported_entities[entity_type].append(imported_entity.id)
+                        if entity_type in ["scenario", "scenarios"]:
+                            imported_scenario = imported_entity
+            except Exception as err:
+                cls._logger.error(f"An error occurred during the import: {err}. Rollback the import.")
+
+                # Rollback the import
+                for entity_type, entity_ids in list(imported_entities.items())[::-1]:
+                    manager = entity_managers[entity_type]
+                    for entity_id in entity_ids:
+                        if manager._exists(entity_id):
+                            manager._delete(entity_id)
+                raise err
+
+        cls._logger.info(f"Scenario {imported_scenario.id} has been successfully imported.")  # type: ignore[union-attr]
+        return imported_scenario

+ 1 - 2
taipy/core/scenario/_scenario_manager_factory.py

@@ -19,11 +19,10 @@ from ._scenario_sql_repository import _ScenarioSQLRepository
 
 
 class _ScenarioManagerFactory(_ManagerFactory):
-
     __REPOSITORY_MAP = {"default": _ScenarioFSRepository, "sql": _ScenarioSQLRepository}
 
     @classmethod
-    def _build_manager(cls) -> Type[_ScenarioManager]:  # type: ignore
+    def _build_manager(cls) -> Type[_ScenarioManager]:
         if cls._using_enterprise():
             scenario_manager = _load_fct(
                 cls._TAIPY_ENTERPRISE_CORE_MODULE + ".scenario._scenario_manager", "_ScenarioManager"

+ 1 - 1
taipy/core/scenario/scenario.py

@@ -136,7 +136,7 @@ class Scenario(_Entity, Submittable, _Labeled):
         return hash(self.id)
 
     def __eq__(self, other):
-        return self.id == other.id
+        return isinstance(other, Scenario) and self.id == other.id
 
     def __getattr__(self, attribute_name):
         protected_attribute_name = _validate_id(attribute_name)

+ 1 - 1
taipy/core/sequence/sequence.py

@@ -81,7 +81,7 @@ class Sequence(_Entity, Submittable, _Labeled):
         return hash(self.id)
 
     def __eq__(self, other):
-        return self.id == other.id
+        return isinstance(other, Sequence) and self.id == other.id
 
     def __getattr__(self, attribute_name):
         protected_attribute_name = _validate_id(attribute_name)

+ 1 - 2
taipy/core/submission/_submission_manager_factory.py

@@ -19,11 +19,10 @@ from ._submission_sql_repository import _SubmissionSQLRepository
 
 
 class _SubmissionManagerFactory(_ManagerFactory):
-
     __REPOSITORY_MAP = {"default": _SubmissionFSRepository, "sql": _SubmissionSQLRepository}
 
     @classmethod
-    def _build_manager(cls) -> Type[_SubmissionManager]:  # type: ignore
+    def _build_manager(cls) -> Type[_SubmissionManager]:
         if cls._using_enterprise():
             submission_manager = _load_fct(
                 cls._TAIPY_ENTERPRISE_CORE_MODULE + ".submission._submission_manager", "_SubmissionManager"

+ 1 - 1
taipy/core/submission/submission.py

@@ -138,7 +138,7 @@ class Submission(_Entity, _Labeled):
         return hash(self.id)
 
     def __eq__(self, other):
-        return self.id == other.id
+        return isinstance(other, Submission) and self.id == other.id
 
     @property  # type: ignore
     @_self_reload(_MANAGER_NAME)

+ 135 - 46
taipy/core/taipy.py

@@ -12,14 +12,16 @@
 import os
 import pathlib
 import shutil
+import tempfile
 from datetime import datetime
-from typing import Any, Callable, Dict, List, Optional, Set, Union, overload
+from typing import Any, Callable, Dict, List, Literal, Optional, Set, Type, Union, overload
 
-from taipy.config import Config, Scope
+from taipy.config import Scope
 from taipy.logger._taipy_logger import _TaipyLogger
 
 from ._core import Core
 from ._entity._entity import _Entity
+from ._manager._manager import _Manager
 from ._version._version_manager_factory import _VersionManagerFactory
 from .common._check_instance import (
     _is_cycle,
@@ -41,8 +43,7 @@ from .data.data_node import DataNode
 from .data.data_node_id import DataNodeId
 from .exceptions.exceptions import (
     DataNodeConfigIsNotGlobal,
-    ExportFolderAlreadyExists,
-    InvalidExportPath,
+    ExportPathAlreadyExists,
     ModelNotFound,
     NonExistingVersion,
     VersionIsNotProductionVersion,
@@ -65,7 +66,7 @@ from .task.task_id import TaskId
 __logger = _TaipyLogger._get_logger()
 
 
-def set(entity: Union[DataNode, Task, Sequence, Scenario, Cycle]):
+def set(entity: Union[DataNode, Task, Sequence, Scenario, Cycle, Submission]):
     """Save or update an entity.
 
     This function allows you to save or update an entity in Taipy.
@@ -508,7 +509,13 @@ def delete(entity_id: Union[TaskId, DataNodeId, SequenceId, ScenarioId, JobId, C
     raise ModelNotFound("NOT_DETERMINED", entity_id)
 
 
-def get_scenarios(cycle: Optional[Cycle] = None, tag: Optional[str] = None) -> List[Scenario]:
+def get_scenarios(
+    cycle: Optional[Cycle] = None,
+    tag: Optional[str] = None,
+    is_sorted: bool = False,
+    descending: bool = False,
+    sort_key: Literal["name", "id", "config_id", "creation_date", "tags"] = "name",
+) -> List[Scenario]:
     """Retrieve a list of existing scenarios filtered by cycle or tag.
 
     This function allows you to retrieve a list of scenarios based on optional
@@ -519,22 +526,34 @@ def get_scenarios(cycle: Optional[Cycle] = None, tag: Optional[str] = None) -> L
     Parameters:
          cycle (Optional[Cycle^]): The optional `Cycle^` to filter scenarios by.
          tag (Optional[str]): The optional tag to filter scenarios by.
+         is_sorted (bool): The option to sort scenarios. The default sorting key is name.
+         descending (bool): The option to sort scenarios on the sorting key in descending order.
+         sort_key (Literal["name", "id", "config_id", "creation_date", "tags"]): The optional sort_key to
+             decide upon what key scenarios are sorted. The sorting is in increasing order for
+             dates, in alphabetical order for name, id and config_id, in lexicographical order for tags.
 
     Returns:
-        The list of scenarios filtered by cycle or tag. If no filtering criteria
-            are provided, this method returns all existing scenarios.
+        The list of scenarios filtered by cycle or tag and optionally sorted by name, id, creation_date or tags.
+            If no filtering criterion is provided, this method returns all existing scenarios.
+            If is_sorted is set to True, the scenarios are sorted by sort_key. The scenarios
+            are sorted by name if an incorrect or no sort_key is provided.
     """
     scenario_manager = _ScenarioManagerFactory._build_manager()
     if not cycle and not tag:
-        return scenario_manager._get_all()
-    if cycle and not tag:
-        return scenario_manager._get_all_by_cycle(cycle)
-    if not cycle and tag:
-        return scenario_manager._get_all_by_tag(tag)
-    if cycle and tag:
+        scenarios = scenario_manager._get_all()
+    elif cycle and not tag:
+        scenarios = scenario_manager._get_all_by_cycle(cycle)
+    elif not cycle and tag:
+        scenarios = scenario_manager._get_all_by_tag(tag)
+    elif cycle and tag:
         cycles_scenarios = scenario_manager._get_all_by_cycle(cycle)
-        return [scenario for scenario in cycles_scenarios if scenario.has_tag(tag)]
-    return []
+        scenarios = [scenario for scenario in cycles_scenarios if scenario.has_tag(tag)]
+    else:
+        scenarios = []
+
+    if is_sorted:
+        scenario_manager._sort_scenarios(scenarios, descending, sort_key)
+    return scenarios
 
 
 def get_primary(cycle: Cycle) -> Optional[Scenario]:
@@ -550,13 +569,31 @@ def get_primary(cycle: Cycle) -> Optional[Scenario]:
     return _ScenarioManagerFactory._build_manager()._get_primary(cycle)
 
 
-def get_primary_scenarios() -> List[Scenario]:
+def get_primary_scenarios(
+    is_sorted: bool = False,
+    descending: bool = False,
+    sort_key: Literal["name", "id", "config_id", "creation_date", "tags"] = "name",
+) -> List[Scenario]:
     """Retrieve a list of all primary scenarios.
 
+    Parameters:
+         is_sorted (bool): The option to sort scenarios. The default sorting key is name.
+         descending (bool): The option to sort scenarios on the sorting key in descending order.
+         sort_key (Literal["name", "id", "config_id", "creation_date", "tags"]): The optional sort_key to
+             decide upon what key scenarios are sorted. The sorting is in increasing order for
+             dates, in alphabetical order for name, id and config_id, in lexicographical order for tags.
+
     Returns:
-        A list containing all primary scenarios.
+        The list containing all primary scenarios, optionally sorted by name, id, creation_date or tags.
+            The sorting is in increasing order for dates, in alphabetical order for name and
+            id, and in lexicographical order for tags. If is_sorted is set to True, but an
+            incorrect or no sort_key is provided, the scenarios are sorted by name.
     """
-    return _ScenarioManagerFactory._build_manager()._get_primary_scenarios()
+    scenario_manager = _ScenarioManagerFactory._build_manager()
+    scenarios = scenario_manager._get_primary_scenarios()
+    if is_sorted:
+        scenario_manager._sort_scenarios(scenarios, descending, sort_key)
+    return scenarios
 
 
 def is_promotable(scenario: Union[Scenario, ScenarioId]) -> bool:
@@ -944,18 +981,19 @@ def clean_all_entities(version_number: str) -> bool:
 
 def export_scenario(
     scenario_id: ScenarioId,
-    folder_path: Union[str, pathlib.Path],
+    output_path: Union[str, pathlib.Path],
     override: bool = False,
     include_data: bool = False,
 ):
-    """Export all related entities of a scenario to a folder.
+    """Export all related entities of a scenario to a archive zip file.
 
     This function exports all related entities of the specified scenario to the
-    specified folder.
+    specified archive zip file.
 
     Parameters:
         scenario_id (ScenarioId): The ID of the scenario to export.
-        folder_path (Union[str, pathlib.Path]): The folder path to export the scenario to.
+        output_path (Union[str, pathlib.Path]): The path to export the scenario to.
+            The path should include the file name without the extension or with the `.zip` extension.
             If the path exists and the override parameter is False, an exception is raised.
         override (bool): If True, the existing folder will be overridden. Default is False.
         include_data (bool): If True, the file-based data nodes are exported as well.
@@ -964,7 +1002,7 @@ def export_scenario(
             will not be exported. The default value is False.
 
     Raises:
-        ExportFolderAlreadyExist^: If the `folder_path` already exists and the override parameter is False.
+        ExportPathAlreadyExists^: If the `output_path` already exists and the override parameter is False.
     """
     manager = _ScenarioManagerFactory._build_manager()
     scenario = manager._get(scenario_id)
@@ -973,31 +1011,82 @@ def export_scenario(
     if scenario.cycle:
         entity_ids.cycle_ids = {scenario.cycle.id}
 
-    if folder_path == Config.core.taipy_storage_folder:
-        raise InvalidExportPath("The export folder must not be the storage folder.")
+    output_filename = os.path.splitext(output_path)[0] if str(output_path).endswith(".zip") else str(output_path)
+    output_zip_path = pathlib.Path(output_filename + ".zip")
 
-    if os.path.exists(folder_path):
+    if output_zip_path.exists():
         if override:
-            __logger.warning(f"Override the existing folder '{folder_path}'")
-            shutil.rmtree(folder_path, ignore_errors=True)
+            __logger.warning(f"Override the existing path '{output_zip_path}' to export scenario {scenario_id}.")
+            output_zip_path.unlink()
         else:
-            raise ExportFolderAlreadyExists(str(folder_path), scenario_id)
-
-    for data_node_id in entity_ids.data_node_ids:
-        _DataManagerFactory._build_manager()._export(data_node_id, folder_path, include_data=include_data)
-    for task_id in entity_ids.task_ids:
-        _TaskManagerFactory._build_manager()._export(task_id, folder_path)
-    for sequence_id in entity_ids.sequence_ids:
-        _SequenceManagerFactory._build_manager()._export(sequence_id, folder_path)
-    for cycle_id in entity_ids.cycle_ids:
-        _CycleManagerFactory._build_manager()._export(cycle_id, folder_path)
-    for scenario_id in entity_ids.scenario_ids:
-        _ScenarioManagerFactory._build_manager()._export(scenario_id, folder_path)
-    for job_id in entity_ids.job_ids:
-        _JobManagerFactory._build_manager()._export(job_id, folder_path)
-    for submission_id in entity_ids.submission_ids:
-        _SubmissionManagerFactory._build_manager()._export(submission_id, folder_path)
-    _VersionManagerFactory._build_manager()._export(scenario.version, folder_path)
+            raise ExportPathAlreadyExists(str(output_zip_path), scenario_id)
+
+    with tempfile.TemporaryDirectory() as tmp_dir:
+        for data_node_id in entity_ids.data_node_ids:
+            _DataManagerFactory._build_manager()._export(data_node_id, tmp_dir, include_data=include_data)
+        for task_id in entity_ids.task_ids:
+            _TaskManagerFactory._build_manager()._export(task_id, tmp_dir)
+        for sequence_id in entity_ids.sequence_ids:
+            _SequenceManagerFactory._build_manager()._export(sequence_id, tmp_dir)
+        for cycle_id in entity_ids.cycle_ids:
+            _CycleManagerFactory._build_manager()._export(cycle_id, tmp_dir)
+        for scenario_id in entity_ids.scenario_ids:
+            _ScenarioManagerFactory._build_manager()._export(scenario_id, tmp_dir)
+        for job_id in entity_ids.job_ids:
+            _JobManagerFactory._build_manager()._export(job_id, tmp_dir)
+        for submission_id in entity_ids.submission_ids:
+            _SubmissionManagerFactory._build_manager()._export(submission_id, tmp_dir)
+        _VersionManagerFactory._build_manager()._export(scenario.version, tmp_dir)
+
+        shutil.make_archive(output_filename, "zip", tmp_dir)
+
+
+def import_scenario(input_path: Union[str, pathlib.Path], override: bool = False) -> Optional[Scenario]:
+    """Import from an archive zip file containing an exported scenario into the current Taipy application.
+
+    The zip file should have been created by the `taipy.export_scenario()^` method, which exports all related entities
+    of the scenario.
+    All entities should belong to the same version that is compatible with the current Taipy application version.
+
+    Parameters:
+        input_path (Union[str, pathlib.Path]): The path to the archive scenario to import.
+            If the path doesn't exist, an exception is raised.
+        override (bool): If True, override the entities if they already exist. The default value is False.
+
+    Returns:
+        The imported scenario.
+
+    Raises:
+        FileNotFoundError: If the import path does not exist.
+        ImportArchiveDoesntContainAnyScenario: If the unzip folder doesn't contain any scenario.
+        ConflictedConfigurationError: If the configuration of the imported scenario is conflicted with the current one.
+    """
+    if isinstance(input_path, str):
+        zip_file_path: pathlib.Path = pathlib.Path(input_path)
+    else:
+        zip_file_path = input_path
+
+    if not zip_file_path.exists():
+        raise FileNotFoundError(f"The import archive path '{zip_file_path}' does not exist.")
+
+    entity_managers: Dict[str, Type[_Manager]] = {
+        "cycles": _CycleManagerFactory._build_manager(),
+        "cycle": _CycleManagerFactory._build_manager(),
+        "data_nodes": _DataManagerFactory._build_manager(),
+        "data_node": _DataManagerFactory._build_manager(),
+        "tasks": _TaskManagerFactory._build_manager(),
+        "task": _TaskManagerFactory._build_manager(),
+        "scenarios": _ScenarioManagerFactory._build_manager(),
+        "scenario": _ScenarioManagerFactory._build_manager(),
+        "jobs": _JobManagerFactory._build_manager(),
+        "job": _JobManagerFactory._build_manager(),
+        "submission": _SubmissionManagerFactory._build_manager(),
+        "version": _VersionManagerFactory._build_manager(),
+    }
+
+    return _ScenarioManagerFactory._build_manager()._import_scenario_and_children_entities(
+        zip_file_path, override, entity_managers
+    )
 
 
 def get_parents(

+ 1 - 2
taipy/core/task/_task_manager_factory.py

@@ -19,11 +19,10 @@ from ._task_sql_repository import _TaskSQLRepository
 
 
 class _TaskManagerFactory(_ManagerFactory):
-
     __REPOSITORY_MAP = {"default": _TaskFSRepository, "sql": _TaskSQLRepository}
 
     @classmethod
-    def _build_manager(cls) -> Type[_TaskManager]:  # type: ignore
+    def _build_manager(cls) -> Type[_TaskManager]:
         if cls._using_enterprise():
             task_manager = _load_fct(
                 cls._TAIPY_ENTERPRISE_CORE_MODULE + ".task._task_manager", "_TaskManager"

+ 1 - 1
taipy/core/task/task.py

@@ -82,7 +82,7 @@ class Task(_Entity, _Labeled):
         return hash(self.id)
 
     def __eq__(self, other):
-        return self.id == other.id
+        return isinstance(other, Task) and self.id == other.id
 
     def __getstate__(self):
         return vars(self)

+ 15 - 12
taipy/gui/_renderers/builder.py

@@ -366,16 +366,18 @@ class _Builder:
         lov_name = self.__hashes.get(var_name)
         lov = self.__get_list_of_(var_name)
         default_lov = []
+
+        adapter = self.__attributes.get("adapter")
+        if adapter and isinstance(adapter, str):
+            adapter = self.__gui._get_user_function(adapter)
+        if adapter and not callable(adapter):
+            _warn(f"{self.__element_name}: adapter property value is invalid.")
+            adapter = None
+        var_type = self.__attributes.get("type")
+        if isclass(var_type):
+            var_type = var_type.__name__  # type: ignore
+
         if isinstance(lov, list):
-            adapter = self.__attributes.get("adapter")
-            if adapter and isinstance(adapter, str):
-                adapter = self.__gui._get_user_function(adapter)
-            if adapter and not callable(adapter):
-                _warn(f"{self.__element_name}: adapter property value is invalid.")
-                adapter = None
-            var_type = self.__attributes.get("type")
-            if isclass(var_type):
-                var_type = var_type.__name__  # type: ignore
             if not isinstance(var_type, str):
                 elt = None
                 if len(lov) == 0:
@@ -450,8 +452,9 @@ class _Builder:
                 else lov_name
             )
             hash_name = self.__get_typed_hash_name(typed_lov_hash, PropertyType.lov)
-            self.__update_vars.append(f"{property_name}={hash_name}")
-            self.__set_react_attribute(property_name, hash_name)
+            camel_prop = _to_camel_case(property_name)
+            self.__update_vars.append(f"{camel_prop}={hash_name}")
+            self.__set_react_attribute(camel_prop, hash_name)
 
         return self
 
@@ -1009,7 +1012,7 @@ class _Builder:
                     self.__update_vars.append(f"{prop_name}={hash_name}")
                     self.__set_react_attribute(prop_name, hash_name)
 
-            self.__set_refresh_on_update()
+        self.__set_refresh_on_update()
         return self
 
     def set_attribute(self, name: str, value: t.Any):

+ 11 - 7
taipy/gui/_renderers/factory.py

@@ -327,19 +327,18 @@ class _Factory:
                 ("on_action", PropertyType.function),
                 ("inactive_ids", PropertyType.dynamic_list),
                 ("hover_text", PropertyType.dynamic_string),
-                ("lov", PropertyType.lov)
+                ("lov", PropertyType.lov),
             ]
         )
         ._set_propagate(),
         "navbar": lambda gui, control_type, attrs: _Builder(
             gui=gui, control_type=control_type, element_name="NavBar", attributes=attrs, default_value=None
-        )
-        .set_attributes(
+        ).set_attributes(
             [
                 ("id",),
                 ("active", PropertyType.dynamic_boolean, True),
                 ("hover_text", PropertyType.dynamic_string),
-                ("lov", PropertyType.single_lov)
+                ("lov", PropertyType.single_lov),
             ]
         ),
         "number": lambda gui, control_type, attrs: _Builder(
@@ -414,7 +413,7 @@ class _Factory:
                 ("on_change", PropertyType.function),
                 ("label",),
                 ("mode",),
-                ("lov", PropertyType.lov)
+                ("lov", PropertyType.lov),
             ]
         )
         ._set_propagate(),
@@ -526,7 +525,7 @@ class _Factory:
                 ("allow_unselect", PropertyType.boolean),
                 ("on_change", PropertyType.function),
                 ("mode",),
-                ("lov", PropertyType.single_lov)
+                ("lov", PropertyType.single_lov),
             ]
         )
         ._set_kind()
@@ -561,6 +560,8 @@ class _Factory:
     # TODO: process \" in property value
     _PROPERTY_RE = re.compile(r"\s+([a-zA-Z][\.a-zA-Z_$0-9]*(?:\[(?:.*?)\])?)=\"((?:(?:(?<=\\)\")|[^\"])*)\"")
 
+    __COUNTER = 0
+
     @staticmethod
     def set_library(library: "ElementLibrary"):
         from ..extension.library import Element, ElementLibrary
@@ -620,6 +621,7 @@ class _Factory:
         name = name[len(_Factory.__TAIPY_NAME_SPACE) :] if name.startswith(_Factory.__TAIPY_NAME_SPACE) else name
         builder = _Factory.__CONTROL_BUILDERS.get(name)
         built = None
+        _Factory.__COUNTER += 1
         with gui._get_autorization():
             if builder is None:
                 lib, element_name, element = _Factory.__get_library_element(name)
@@ -627,7 +629,9 @@ class _Factory:
                     from ..extension.library import Element
 
                     if isinstance(element, Element):
-                        return element._call_builder(element_name, gui, all_properties, lib, is_html)
+                        return element._call_builder(
+                            element_name, gui, all_properties, lib, is_html, counter=_Factory.__COUNTER
+                        )
             else:
                 built = builder(gui, name, all_properties)
             if isinstance(built, _Builder):

+ 3 - 1
taipy/gui/_renderers/json.py

@@ -22,7 +22,7 @@ from flask.json.provider import DefaultJSONProvider
 
 from .._warnings import _warn
 from ..icon import Icon
-from ..utils import _date_to_string, _MapDict, _TaipyBase
+from ..utils import _date_to_string, _DoNotUpdate, _MapDict, _TaipyBase
 from ..utils.singleton import _Singleton
 
 
@@ -51,6 +51,8 @@ class _DefaultJsonAdapter(JsonAdapter):
             return str(o)
         if isinstance(o, numpy.generic):
             return getattr(o, "tolist", lambda: o)()
+        if isinstance(o, _DoNotUpdate):
+            return None
 
 
 class _TaipyJsonAdapter(object, metaclass=_Singleton):

+ 24 - 2
taipy/gui/extension/library.py

@@ -42,6 +42,7 @@ class ElementProperty:
         property_type: t.Union[PropertyType, t.Type[_TaipyBase]],
         default_value: t.Optional[t.Any] = None,
         js_name: t.Optional[str] = None,
+        with_update: t.Optional[bool] = None,
     ) -> None:
         """Initializes a new custom property declaration for an `Element^`.
 
@@ -64,6 +65,7 @@ class ElementProperty:
         else:
             self.property_type = property_type
         self._js_name = js_name
+        self.with_update = with_update
         super().__init__()
 
     def check(self, element_name: str, prop_name: str):
@@ -75,7 +77,11 @@ class ElementProperty:
             _warn(f"Property type '{self.property_type}' is invalid for element property '{element_name}.{prop_name}'.")
 
     def _get_tuple(self, name: str) -> tuple:
-        return (name, self.property_type, self.default_value)
+        return (
+            (name, self.property_type, self.default_value)
+            if self.with_update is None
+            else (name, self.property_type, self.default_value, self.with_update)
+        )
 
     def get_js_name(self, name: str) -> str:
         return self._js_name or _to_camel_case(name)
@@ -90,6 +96,7 @@ class Element:
     """
 
     __RE_PROP_VAR = re.compile(r"<tp:prop:(\w+)>")
+    __RE_UNIQUE_VAR = re.compile(r"<tp:uniq:(\w+)>")
 
     def __init__(
         self,
@@ -152,9 +159,11 @@ class Element:
         properties: t.Optional[t.Dict[str, t.Any]],
         lib: "ElementLibrary",
         is_html: t.Optional[bool] = False,
+        counter: int = 0
     ) -> t.Union[t.Any, t.Tuple[str, str]]:
         attributes = properties if isinstance(properties, dict) else {}
         if self.inner_properties:
+            uniques: t.Dict[str, int] = {}
             self.attributes.update(self.inner_properties)
             for prop, attr in self.inner_properties.items():
                 val = attr.default_value
@@ -162,8 +171,21 @@ class Element:
                     # handling property replacement in inner properties <tp:prop:...>
                     while m := Element.__RE_PROP_VAR.search(val):
                         var = attributes.get(m.group(1))
-                        hash_value = "None" if var is None else gui._evaluate_expr(var)
+                        hash_value = None if var is None else gui._evaluate_expr(var)
+                        if hash_value:
+                            names = gui._get_real_var_name(hash_value)
+                            hash_value = names[0] if isinstance(names, tuple) else names
+                        else:
+                            hash_value = "None"
                         val = val[: m.start()] + hash_value + val[m.end() :]
+                    # handling unique id replacement in inner properties <tp:uniq:...>
+                    while m := Element.__RE_UNIQUE_VAR.search(val):
+                        id = uniques.get(m.group(1))
+                        if id is None:
+                            id = len(uniques) + 1
+                            uniques[m.group(1)] = id
+                        val = f"{val[: m.start()]}'{counter}.{id}'{val[m.end() :]}"
+
                 attributes[prop] = val
         # this modifies attributes
         hash_names = _Builder._get_variable_hash_names(gui, attributes)  # variable replacement

+ 27 - 16
taipy/gui/gui.py

@@ -74,6 +74,7 @@ from .state import State
 from .types import _WsType
 from .utils import (
     _delscopeattr,
+    _DoNotUpdate,
     _filter_locals,
     _get_broadcast_var_name,
     _get_client_var_name,
@@ -110,11 +111,6 @@ from .utils.chart_config_builder import _build_chart_config
 from .utils.table_col_builder import _enhance_columns
 
 
-class _DoNotUpdate:
-    def __repr__(self):
-        return "Taipy: Do not update"
-
-
 class Gui:
     """Entry point for the Graphical User Interface generation.
 
@@ -691,6 +687,7 @@ class Gui:
         propagate=True,
         holder: t.Optional[_TaipyBase] = None,
         on_change: t.Optional[str] = None,
+        forward: t.Optional[bool] = True,
     ) -> None:
         if holder:
             var_name = holder.get_name()
@@ -707,17 +704,22 @@ class Gui:
                 derived_vars.update(self._re_evaluate_expr(var_name))
         elif holder:
             derived_vars.update(self._evaluate_holders(hash_expr))
-        # if the variable has been evaluated then skip updating to prevent infinite loop
-        var_modified = self.__is_var_modified_in_context(hash_expr, derived_vars)
-        if not var_modified:
-            self._call_on_change(
-                var_name,
-                value.get() if isinstance(value, _TaipyBase) else value._dict if isinstance(value, _MapDict) else value,
-                on_change,
-            )
-        derived_modified = self.__clean_vars_on_exit()
-        if derived_modified is not None:
-            self.__send_var_list_update(list(derived_modified), var_name)
+        if forward:
+            # if the variable has been evaluated then skip updating to prevent infinite loop
+            var_modified = self.__is_var_modified_in_context(hash_expr, derived_vars)
+            if not var_modified:
+                self._call_on_change(
+                    var_name,
+                    value.get()
+                    if isinstance(value, _TaipyBase)
+                    else value._dict
+                    if isinstance(value, _MapDict)
+                    else value,
+                    on_change,
+                )
+            derived_modified = self.__clean_vars_on_exit()
+            if derived_modified is not None:
+                self.__send_var_list_update(list(derived_modified), var_name)
 
     def _get_real_var_name(self, var_name: str) -> t.Tuple[str, str]:
         if not var_name:
@@ -1044,12 +1046,20 @@ class Gui:
         # TODO: What if value == newvalue?
         self.__send_ws_update_with_dict(ws_dict)
 
+    def __update_state_context(self, payload: dict):
+        # apply state context if any
+        state_context = payload.get("state_context")
+        if isinstance(state_context, dict):
+            for var, val in state_context.items():
+                self._update_var(var, val, True, forward=False)
+
     def __request_data_update(self, var_name: str, payload: t.Any) -> None:
         # Use custom attrgetter function to allow value binding for _MapDict
         newvalue = _getscopeattr_drill(self, var_name)
         if isinstance(newvalue, _TaipyData):
             ret_payload = None
             if isinstance(payload, dict):
+                self.__update_state_context(payload)
                 lib_name = payload.get("library")
                 if isinstance(lib_name, str):
                     libs = self.__extensions.get(lib_name, [])
@@ -1073,6 +1083,7 @@ class Gui:
 
     def __request_var_update(self, payload: t.Any):
         if isinstance(payload, dict) and isinstance(payload.get("names"), list):
+            self.__update_state_context(payload)
             if payload.get("refresh", False):
                 # refresh vars
                 for _var in t.cast(list, payload.get("names")):

+ 1 - 0
taipy/gui/utils/__init__.py

@@ -37,6 +37,7 @@ from .is_debugging import is_debugging
 from .is_port_open import _is_port_open
 from .isnotebook import _is_in_notebook
 from .types import (
+    _DoNotUpdate,
     _TaipyBase,
     _TaipyBool,
     _TaipyContent,

+ 5 - 0
taipy/gui/utils/types.py

@@ -20,6 +20,11 @@ from .._warnings import _warn
 from . import _date_to_string, _MapDict, _string_to_date, _variable_decode
 
 
+class _DoNotUpdate:
+    def __repr__(self):
+        return "Taipy: Do not update"
+
+
 class _TaipyBase(ABC):
     __HOLDER_PREFIXES: t.Optional[t.List[str]] = None
     _HOLDER_PREFIX = "_Tp"

+ 52 - 44
taipy/gui_core/_GuiCoreLib.py

@@ -20,7 +20,6 @@ from ._adapters import (
     _GuiCoreDatanodeAdapter,
     _GuiCoreScenarioAdapter,
     _GuiCoreScenarioDagAdapter,
-    _GuiCoreScenarioNoUpdate,
 )
 from ._context import _GuiCoreContext
 
@@ -31,6 +30,18 @@ class _GuiCore(ElementLibrary):
     __SCENARIO_ADAPTER = "tgc_scenario"
     __DATANODE_ADAPTER = "tgc_datanode"
     __JOB_ADAPTER = "tgc_job"
+    __INNER_VARS = (
+        _GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR,
+        _GuiCoreContext._SCENARIO_SELECTOR_ID_VAR,
+        _GuiCoreContext._SCENARIO_VIZ_ERROR_VAR,
+        _GuiCoreContext._JOB_SELECTOR_ERROR_VAR,
+        _GuiCoreContext._DATANODE_VIZ_ERROR_VAR,
+        _GuiCoreContext._DATANODE_VIZ_OWNER_ID_VAR,
+        _GuiCoreContext._DATANODE_VIZ_HISTORY_ID_VAR,
+        _GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR,
+        _GuiCoreContext._DATANODE_VIZ_DATA_CHART_ID_VAR,
+        _GuiCoreContext._DATANODE_VIZ_PROPERTIES_ID_VAR,
+    )
 
     __elts = {
         "scenario_selector": Element(
@@ -47,9 +58,14 @@ class _GuiCore(ElementLibrary):
                 "show_pins": ElementProperty(PropertyType.boolean, False),
                 "on_creation": ElementProperty(PropertyType.function),
                 "show_dialog": ElementProperty(PropertyType.boolean, True),
+                _GuiCoreContext._SEL_SCENARIOS_PROP: ElementProperty(PropertyType.dynamic_list),
+                "multiple": ElementProperty(PropertyType.boolean, False),
             },
             inner_properties={
-                "scenarios": ElementProperty(PropertyType.lov, f"{{{__CTX_VAR_NAME}.get_scenarios()}}"),
+                "inner_scenarios": ElementProperty(
+                    PropertyType.lov,
+                    f"{{{__CTX_VAR_NAME}.get_scenarios(<tp:prop:{_GuiCoreContext._SEL_SCENARIOS_PROP}>)}}",
+                ),
                 "on_scenario_crud": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.crud_scenario}}"),
                 "configs": ElementProperty(PropertyType.react, f"{{{__CTX_VAR_NAME}.get_scenario_configs()}}"),
                 "core_changed": ElementProperty(PropertyType.broadcast, _GuiCoreContext._CORE_CHANGED_NAME),
@@ -117,7 +133,8 @@ class _GuiCore(ElementLibrary):
                 "height": ElementProperty(PropertyType.string, "50vh"),
                 "class_name": ElementProperty(PropertyType.dynamic_string),
                 "show_pins": ElementProperty(PropertyType.boolean, True),
-                _GuiCoreContext._DATANODE_SEL_SCENARIO_PROP: ElementProperty(_GuiCoreScenarioNoUpdate),
+                _GuiCoreContext._DATANODE_SEL_SCENARIO_PROP: ElementProperty(PropertyType.dynamic_list),
+                "multiple": ElementProperty(PropertyType.boolean, False),
             },
             inner_properties={
                 "datanodes": ElementProperty(
@@ -143,7 +160,7 @@ class _GuiCore(ElementLibrary):
                 "show_properties": ElementProperty(PropertyType.boolean, True),
                 "show_history": ElementProperty(PropertyType.boolean, True),
                 "show_data": ElementProperty(PropertyType.boolean, True),
-                "chart_config": ElementProperty(PropertyType.dict),
+                "chart_configs": ElementProperty(PropertyType.dict),
                 "class_name": ElementProperty(PropertyType.dynamic_string),
                 "scenario": ElementProperty(PropertyType.lov_value, "optional"),
                 "width": ElementProperty(PropertyType.string),
@@ -154,44 +171,55 @@ class _GuiCore(ElementLibrary):
                 "error": ElementProperty(PropertyType.react, f"{{{_GuiCoreContext._DATANODE_VIZ_ERROR_VAR}}}"),
                 "scenarios": ElementProperty(
                     PropertyType.lov,
-                    f"{{{__CTX_VAR_NAME}.get_scenarios_for_owner({_GuiCoreContext._DATANODE_VIZ_OWNER_ID_VAR})}}",
+                    f"{{{__CTX_VAR_NAME}.get_scenarios_for_owner({_GuiCoreContext._DATANODE_VIZ_OWNER_ID_VAR},"
+                    + "<tp:uniq:dn>)}",
                 ),
                 "type": ElementProperty(PropertyType.inner, __SCENARIO_ADAPTER),
-                "on_id_select": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.select_id}}"),
-                "history": ElementProperty(
+                "dn_properties": ElementProperty(
                     PropertyType.react,
-                    f"{{{__CTX_VAR_NAME}.get_data_node_history("
-                    + f"<tp:prop:{_GuiCoreContext._DATANODE_VIZ_DATA_NODE_PROP}>, "
-                    + f"{_GuiCoreContext._DATANODE_VIZ_HISTORY_ID_VAR})}}",
+                    f"{{{__CTX_VAR_NAME}.get_data_node_properties("
+                    + f"{_GuiCoreContext._DATANODE_VIZ_PROPERTIES_ID_VAR},"
+                    + "<tp:uniq:dn>)}",
                 ),
-                "data": ElementProperty(
+                "history": ElementProperty(
                     PropertyType.react,
-                    f"{{{__CTX_VAR_NAME}.get_data_node_data(<tp:prop:{_GuiCoreContext._DATANODE_VIZ_DATA_NODE_PROP}>,"
-                    + f" {_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR})}}",
+                    f"{{{__CTX_VAR_NAME}.get_data_node_history("
+                    + f"{_GuiCoreContext._DATANODE_VIZ_HISTORY_ID_VAR},"
+                    + "<tp:uniq:dn>)}",
                 ),
                 "tabular_data": ElementProperty(
                     PropertyType.data,
                     f"{{{__CTX_VAR_NAME}.get_data_node_tabular_data("
-                    + f"<tp:prop:{_GuiCoreContext._DATANODE_VIZ_DATA_NODE_PROP}>, "
-                    + f"{_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR})}}",
+                    + f"{_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR},"
+                    + "<tp:uniq:dn>)}",
                 ),
                 "tabular_columns": ElementProperty(
                     PropertyType.dynamic_string,
                     f"{{{__CTX_VAR_NAME}.get_data_node_tabular_columns("
-                    + f"<tp:prop:{_GuiCoreContext._DATANODE_VIZ_DATA_NODE_PROP}>, "
-                    + f"{_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR})}}",
+                    + f"{_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR},"
+                    + "<tp:uniq:dn>)}",
+                    with_update=True,
                 ),
                 "chart_config": ElementProperty(
                     PropertyType.dynamic_string,
                     f"{{{__CTX_VAR_NAME}.get_data_node_chart_config("
-                    + f"<tp:prop:{_GuiCoreContext._DATANODE_VIZ_DATA_NODE_PROP}>, "
-                    + f"{_GuiCoreContext._DATANODE_VIZ_DATA_CHART_ID_VAR})}}",
+                    + f"{_GuiCoreContext._DATANODE_VIZ_DATA_CHART_ID_VAR},"
+                    + "<tp:uniq:dn>)}",
+                    with_update=True,
                 ),
                 "on_data_value": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.update_data}}"),
                 "on_tabular_data_edit": ElementProperty(
                     PropertyType.function, f"{{{__CTX_VAR_NAME}.tabular_data_edit}}"
                 ),
                 "on_lock": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.lock_datanode_for_edit}}"),
+                "update_dn_vars": ElementProperty(
+                    PropertyType.string,
+                    f"data_id={_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR};"
+                    + f"history_id={_GuiCoreContext._DATANODE_VIZ_HISTORY_ID_VAR};"
+                    + f"owner_id={_GuiCoreContext._DATANODE_VIZ_OWNER_ID_VAR};"
+                    + f"chart_id={_GuiCoreContext._DATANODE_VIZ_DATA_CHART_ID_VAR};"
+                    + f"properties_id={_GuiCoreContext._DATANODE_VIZ_PROPERTIES_ID_VAR}",
+                ),
             },
         ),
         "job_selector": Element(
@@ -230,19 +258,7 @@ class _GuiCore(ElementLibrary):
         return ["lib/taipy-gui-core.js"]
 
     def on_init(self, gui: Gui) -> t.Optional[t.Tuple[str, t.Any]]:
-        gui._get_default_locals_bind().update(
-            {
-                _GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR: "",
-                _GuiCoreContext._SCENARIO_SELECTOR_ID_VAR: "",
-                _GuiCoreContext._SCENARIO_VIZ_ERROR_VAR: "",
-                _GuiCoreContext._JOB_SELECTOR_ERROR_VAR: "",
-                _GuiCoreContext._DATANODE_VIZ_ERROR_VAR: "",
-                _GuiCoreContext._DATANODE_VIZ_OWNER_ID_VAR: "",
-                _GuiCoreContext._DATANODE_VIZ_HISTORY_ID_VAR: "",
-                _GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR: "",
-                _GuiCoreContext._DATANODE_VIZ_DATA_CHART_ID_VAR: "",
-            }
-        )
+        gui._get_default_locals_bind().update({v: "" for v in _GuiCore.__INNER_VARS})
         ctx = _GuiCoreContext(gui)
         gui._add_adapter_for_type(_GuiCore.__SCENARIO_ADAPTER, ctx.scenario_adapter)
         gui._add_adapter_for_type(_GuiCore.__DATANODE_ADAPTER, ctx.data_node_adapter)
@@ -250,20 +266,12 @@ class _GuiCore(ElementLibrary):
         return _GuiCore.__CTX_VAR_NAME, ctx
 
     def on_user_init(self, state: State):
-        for var in [
-            _GuiCoreContext._SCENARIO_SELECTOR_ERROR_VAR,
-            _GuiCoreContext._SCENARIO_SELECTOR_ID_VAR,
-            _GuiCoreContext._SCENARIO_VIZ_ERROR_VAR,
-            _GuiCoreContext._JOB_SELECTOR_ERROR_VAR,
-            _GuiCoreContext._DATANODE_VIZ_ERROR_VAR,
-            _GuiCoreContext._DATANODE_VIZ_OWNER_ID_VAR,
-            _GuiCoreContext._DATANODE_VIZ_HISTORY_ID_VAR,
-            _GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR,
-            _GuiCoreContext._DATANODE_VIZ_DATA_CHART_ID_VAR,
-        ]:
+        for var in _GuiCore.__INNER_VARS:
             state._add_attribute(var, "")
 
     def get_version(self) -> str:
         if not hasattr(self, "version"):
-            self.version = _get_version() + str(datetime.now().timestamp())
+            self.version = _get_version()
+            if "dev" in self.version:
+                self.version += str(datetime.now().timestamp())
         return self.version

+ 48 - 6
taipy/gui_core/_adapters.py

@@ -9,8 +9,12 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
+import math
 import typing as t
 from enum import Enum
+from numbers import Number
+
+import pandas as pd
 
 from taipy.core import (
     Cycle,
@@ -26,6 +30,7 @@ from taipy.core import (
     is_submittable,
 )
 from taipy.core import get as core_get
+from taipy.core.data._tabular_datanode_mixin import _TabularDataNodeMixin
 from taipy.gui._warnings import _warn
 from taipy.gui.gui import _DoNotUpdate
 from taipy.gui.utils import _TaipyBase
@@ -57,6 +62,8 @@ class _GuiCoreScenarioAdapter(_TaipyBase):
 
     def get(self):
         data = super().get()
+        if isinstance(data, (list, tuple)) and len(data) == 1:
+            data = data[0]
         if isinstance(data, Scenario):
             try:
                 if scenario := core_get(data.id):
@@ -113,6 +120,8 @@ class _GuiCoreScenarioDagAdapter(_TaipyBase):
 
     def get(self):
         data = super().get()
+        if isinstance(data, (list, tuple)) and len(data) == 1:
+            data = data[0]
         if isinstance(data, Scenario):
             try:
                 if scenario := core_get(data.id):
@@ -158,10 +167,47 @@ class _GuiCoreScenarioNoUpdate(_TaipyBase, _DoNotUpdate):
 
 
 class _GuiCoreDatanodeAdapter(_TaipyBase):
-    __INNER_PROPS = ["name"]
+
+    @staticmethod
+    def _is_tabular_data(datanode: DataNode, value: t.Any):
+        if isinstance(datanode, _TabularDataNodeMixin):
+            return True
+        if datanode.is_ready_for_reading:
+            return isinstance(value, (pd.DataFrame, pd.Series, list, tuple, dict))
+        return False
+
+    def __get_data(self, dn: DataNode):
+            if dn._last_edit_date:
+                if isinstance(dn, _TabularDataNodeMixin):
+                    return (None, None, True, None)
+                try:
+                    value = dn.read()
+                    if _GuiCoreDatanodeAdapter._is_tabular_data(dn, value):
+                        return (None, None, True, None)
+                    val_type = (
+                        "date"
+                        if "date" in type(value).__name__
+                        else type(value).__name__
+                        if isinstance(value, Number)
+                        else None
+                    )
+                    if isinstance(value, float):
+                        if math.isnan(value):
+                            value = None
+                    return (
+                        value,
+                        val_type,
+                        None,
+                        None,
+                    )
+                except Exception as e:
+                    return (None, None, None, f"read data_node: {e}")
+            return (None, None, None, f"Data unavailable for {dn.get_simple_label()}")
 
     def get(self):
         data = super().get()
+        if isinstance(data, (list, tuple)) and len(data) == 1:
+            data = data[0]
         if isinstance(data, DataNode):
             try:
                 if datanode := core_get(data.id):
@@ -180,11 +226,7 @@ class _GuiCoreDatanodeAdapter(_TaipyBase):
                         else _EntityType.SCENARIO.value
                         if isinstance(owner, Scenario)
                         else -1,
-                        [
-                            (k, f"{v}")
-                            for k, v in datanode._get_user_properties().items()
-                            if k not in _GuiCoreDatanodeAdapter.__INNER_PROPS
-                        ],
+                        self.__get_data(datanode),
                         datanode._edit_in_progress,
                         datanode._editor_id,
                         is_readable(datanode),

+ 48 - 96
taipy/gui_core/_context.py

@@ -10,7 +10,6 @@
 # specific language governing permissions and limitations under the License.
 
 import json
-import math
 import typing as t
 from collections import defaultdict
 from numbers import Number
@@ -52,7 +51,6 @@ from taipy.core import (
 from taipy.core import delete as core_delete
 from taipy.core import get as core_get
 from taipy.core import submit as core_submit
-from taipy.core.data._abstract_tabular import _TabularDataNodeMixin
 from taipy.core.notification import CoreEventConsumerBase, EventEntityType
 from taipy.core.notification.event import Event, EventOperation
 from taipy.core.notification.notifier import Notifier
@@ -61,7 +59,7 @@ from taipy.gui import Gui, State
 from taipy.gui._warnings import _warn
 from taipy.gui.gui import _DoNotUpdate
 
-from ._adapters import _EntityType
+from ._adapters import _EntityType, _GuiCoreDatanodeAdapter
 
 
 class _GuiCoreContext(CoreEventConsumerBase):
@@ -82,15 +80,17 @@ class _GuiCoreContext(CoreEventConsumerBase):
     _DATANODE_VIZ_ERROR_VAR = "gui_core_dv_error"
     _DATANODE_VIZ_OWNER_ID_VAR = "gui_core_dv_owner_id"
     _DATANODE_VIZ_HISTORY_ID_VAR = "gui_core_dv_history_id"
+    _DATANODE_VIZ_PROPERTIES_ID_VAR = "gui_core_dv_properties_id"
     _DATANODE_VIZ_DATA_ID_VAR = "gui_core_dv_data_id"
     _DATANODE_VIZ_DATA_CHART_ID_VAR = "gui_core_dv_data_chart_id"
     _DATANODE_VIZ_DATA_NODE_PROP = "data_node"
     _DATANODE_SEL_SCENARIO_PROP = "scenario"
+    _SEL_SCENARIOS_PROP = "scenarios"
 
     def __init__(self, gui: Gui) -> None:
         self.gui = gui
         self.scenario_by_cycle: t.Optional[t.Dict[t.Optional[Cycle], t.List[Scenario]]] = None
-        self.data_nodes_by_owner: t.Optional[t.Dict[t.Optional[str], DataNode]] = None
+        self.data_nodes_by_owner: t.Optional[t.Dict[t.Optional[str], t.List[DataNode]]] = None
         self.scenario_configs: t.Optional[t.List[t.Tuple[str, str]]] = None
         self.jobs_list: t.Optional[t.List[Job]] = None
         self.client_submission: t.Dict[str, SubmissionStatus] = {}
@@ -241,16 +241,19 @@ class _GuiCoreContext(CoreEventConsumerBase):
             )
         return None
 
-    def get_scenarios(self):
-        cycles_scenarios = []
-        with self.lock:
-            if self.scenario_by_cycle is None:
-                self.scenario_by_cycle = get_cycles_scenarios()
-            for cycle, scenarios in self.scenario_by_cycle.items():
-                if cycle is None:
-                    cycles_scenarios.extend(scenarios)
-                else:
-                    cycles_scenarios.append(cycle)
+    def get_scenarios(self, scenarios: t.Optional[t.List[t.Union[Cycle, Scenario]]]):
+        cycles_scenarios: t.List[t.Union[Cycle, Scenario]] = []
+        if scenarios is None:
+            with self.lock:
+                if self.scenario_by_cycle is None:
+                    self.scenario_by_cycle = get_cycles_scenarios()
+                for cycle, c_scenarios in self.scenario_by_cycle.items():
+                    if cycle is None:
+                        cycles_scenarios.extend(c_scenarios)
+                    else:
+                        cycles_scenarios.append(cycle)
+        else:
+            cycles_scenarios = scenarios
         return sorted(cycles_scenarios, key=_GuiCoreContext.get_entity_creation_date_iso)
 
     def select_scenario(self, state: State, id: str, payload: t.Dict[str, str]):
@@ -502,12 +505,17 @@ class _GuiCoreContext(CoreEventConsumerBase):
             for dn in get_data_nodes():
                 self.data_nodes_by_owner[dn.owner_id].append(dn)
 
-    def get_datanodes_tree(self, scenario: t.Optional[Scenario]):
+    def get_datanodes_tree(self, scenarios: t.Optional[t.Union[Scenario, t.List[Scenario]]]):
         with self.lock:
             self.__do_datanodes_tree()
-        return (
-            self.data_nodes_by_owner.get(scenario.id if scenario else None, []) if self.data_nodes_by_owner else []
-        ) + (self.get_scenarios() if not scenario else [])
+        if scenarios is None:
+            return (self.data_nodes_by_owner.get(None) if self.data_nodes_by_owner else []) + self.get_scenarios(None)
+        if not self.data_nodes_by_owner:
+            return []
+        if isinstance(scenarios, (list, tuple)) and len(scenarios) > 1:
+            return scenarios
+        owners = scenarios if isinstance(scenarios, (list, tuple)) else [scenarios]
+        return [d for owner in owners for d in t.cast(list, self.data_nodes_by_owner.get(owner.id))]
 
     def data_node_adapter(self, data):
         try:
@@ -680,7 +688,7 @@ class _GuiCoreContext(CoreEventConsumerBase):
         # we might be comparing naive and aware datetime ISO
         return entity.creation_date.isoformat()
 
-    def get_scenarios_for_owner(self, owner_id: str):
+    def get_scenarios_for_owner(self, owner_id: str, uid: str):
         cycles_scenarios: t.List[t.Union[Scenario, Cycle]] = []
         with self.lock:
             if self.scenario_by_cycle is None:
@@ -700,14 +708,8 @@ class _GuiCoreContext(CoreEventConsumerBase):
                         cycles_scenarios.append(entity)
         return sorted(cycles_scenarios, key=_GuiCoreContext.get_entity_creation_date_iso)
 
-    def get_data_node_history(self, datanode: DataNode, id: str):
-        if (
-            id
-            and isinstance(datanode, DataNode)
-            and id == datanode.id
-            and (dn := core_get(id))
-            and isinstance(dn, DataNode)
-        ):
+    def get_data_node_history(self, id: str, uid: str):
+        if id and (dn := core_get(id)) and isinstance(dn, DataNode):
             res = []
             for e in dn.edits:
                 job_id = e.get("job_id")
@@ -729,50 +731,6 @@ class _GuiCoreContext(CoreEventConsumerBase):
             return sorted(res, key=lambda r: r[0], reverse=True)
         return _DoNotUpdate()
 
-    @staticmethod
-    def __is_tabular_data(datanode: DataNode, value: t.Any):
-        if isinstance(datanode, _TabularDataNodeMixin):
-            return True
-        if datanode.is_ready_for_reading:
-            return isinstance(value, (pd.DataFrame, pd.Series, list, tuple, dict))
-        return False
-
-    def get_data_node_data(self, datanode: DataNode, id: str):
-        if (
-            id
-            and isinstance(datanode, DataNode)
-            and id == datanode.id
-            and (dn := core_get(id))
-            and isinstance(dn, DataNode)
-        ):
-            if dn._last_edit_date:
-                if isinstance(dn, _TabularDataNodeMixin):
-                    return (None, None, True, None)
-                try:
-                    value = dn.read()
-                    if _GuiCoreContext.__is_tabular_data(dn, value):
-                        return (None, None, True, None)
-                    val_type = (
-                        "date"
-                        if "date" in type(value).__name__
-                        else type(value).__name__
-                        if isinstance(value, Number)
-                        else None
-                    )
-                    if isinstance(value, float):
-                        if math.isnan(value):
-                            value = None
-                    return (
-                        value,
-                        val_type,
-                        None,
-                        None,
-                    )
-                except Exception as e:
-                    return (None, None, None, f"read data_node: {e}")
-            return (None, None, None, f"Data unavailable for {dn.get_simple_label()}")
-        return _DoNotUpdate()
-
     def __check_readable_editable(self, state: State, id: str, ent_type: str, var: str):
         if not is_readable(t.cast(ScenarioId, id)):
             state.assign(var, f"{ent_type} {id} is not readable.")
@@ -872,14 +830,26 @@ class _GuiCoreContext(CoreEventConsumerBase):
                 state.assign(_GuiCoreContext._DATANODE_VIZ_ERROR_VAR, f"Error updating Datanode tabular value. {e}")
         setattr(state, _GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR, dn_id)
 
+    def get_data_node_properties(self, id: str, uid: str):
+        if id and is_readable(t.cast(DataNodeId, id)) and (dn := core_get(id)) and isinstance(dn, DataNode):
+            try:
+                return (
+                    (
+                        (k, f"{v}")
+                        for k, v in dn._get_user_properties().items()
+                        if k != _GuiCoreContext.__PROP_ENTITY_NAME
+                    ),
+                )
+            except Exception:
+                return None
+        return None
+
     def __read_tabular_data(self, datanode: DataNode):
         return datanode.read()
 
-    def get_data_node_tabular_data(self, datanode: DataNode, id: str):
+    def get_data_node_tabular_data(self, id: str, uid: str):
         if (
             id
-            and isinstance(datanode, DataNode)
-            and id == datanode.id
             and is_readable(t.cast(DataNodeId, id))
             and (dn := core_get(id))
             and isinstance(dn, DataNode)
@@ -887,17 +857,15 @@ class _GuiCoreContext(CoreEventConsumerBase):
         ):
             try:
                 value = self.__read_tabular_data(dn)
-                if _GuiCoreContext.__is_tabular_data(dn, value):
+                if _GuiCoreDatanodeAdapter._is_tabular_data(dn, value):
                     return value
             except Exception:
                 return None
         return None
 
-    def get_data_node_tabular_columns(self, datanode: DataNode, id: str):
+    def get_data_node_tabular_columns(self, id: str, uid: str):
         if (
             id
-            and isinstance(datanode, DataNode)
-            and id == datanode.id
             and is_readable(t.cast(DataNodeId, id))
             and (dn := core_get(id))
             and isinstance(dn, DataNode)
@@ -905,7 +873,7 @@ class _GuiCoreContext(CoreEventConsumerBase):
         ):
             try:
                 value = self.__read_tabular_data(dn)
-                if _GuiCoreContext.__is_tabular_data(dn, value):
+                if _GuiCoreDatanodeAdapter._is_tabular_data(dn, value):
                     return self.gui._tbl_cols(
                         True, True, "{}", json.dumps({"data": "tabular_data"}), tabular_data=value
                     )
@@ -913,11 +881,9 @@ class _GuiCoreContext(CoreEventConsumerBase):
                 return None
         return None
 
-    def get_data_node_chart_config(self, datanode: DataNode, id: str):
+    def get_data_node_chart_config(self, id: str, uid: str):
         if (
             id
-            and isinstance(datanode, DataNode)
-            and id == datanode.id
             and is_readable(t.cast(DataNodeId, id))
             and (dn := core_get(id))
             and isinstance(dn, DataNode)
@@ -931,20 +897,6 @@ class _GuiCoreContext(CoreEventConsumerBase):
                 return None
         return None
 
-    def select_id(self, state: State, id: str, payload: t.Dict[str, str]):
-        args = payload.get("args")
-        if args is None or not isinstance(args, list) or len(args) == 0 and isinstance(args[0], dict):
-            return
-        data = args[0]
-        if owner_id := data.get("owner_id"):
-            state.assign(_GuiCoreContext._DATANODE_VIZ_OWNER_ID_VAR, owner_id)
-        elif history_id := data.get("history_id"):
-            state.assign(_GuiCoreContext._DATANODE_VIZ_HISTORY_ID_VAR, history_id)
-        elif data_id := data.get("data_id"):
-            state.assign(_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR, data_id)
-        elif chart_id := data.get("chart_id"):
-            state.assign(_GuiCoreContext._DATANODE_VIZ_DATA_CHART_ID_VAR, chart_id)
-
     def on_dag_select(self, state: State, id: str, payload: t.Dict[str, str]):
         args = payload.get("args")
         if args is None or not isinstance(args, list) or len(args) < 2:

+ 60 - 8
taipy/gui_core/viselements.json

@@ -63,16 +63,40 @@
                         "doc": "If True, a pin is shown on each item of the selector and allows to restrict the number of displayed items."
                     },
                     {
-                      "name": "on_creation",
-                      "type": "Callback",
-                      "doc": "The name of the function that is triggered when a scenario is about to be created.<br/><br/>All the parameters of that function are optional:\n<ul>\n<li>state (<code>State^</code>): the state instance.</li>\n<li>id (str): the identifier of the scenario selector.</li>\n<li>payload (dict): the details on this callback's invocation.<br/>\nThis dictionary has the following keys:\n<ul>\n<li>config: the name of the selected scenario configuration.</li>\n<li>date: the creation date for the new scenario.</li>\n<li>label: the user-specified label.</li>\n<li>properties: a dictionary containing all the user-defined custom properties.</li>\n</ul>\n</li>\n<li>The callback function can return a scenario, a string containing an error message (a scenario will not be created), or None (then a new scenario is created with the user parameters).</li>\n</ul>",
-                      "signature": [["state", "State"], ["id", "str"], ["payload", "dict"]]
+                        "name": "on_creation",
+                        "type": "Callback",
+                        "doc": "The name of the function that is triggered when a scenario is about to be created.<br/><br/>All the parameters of that function are optional:\n<ul>\n<li>state (<code>State^</code>): the state instance.</li>\n<li>id (str): the identifier of the scenario selector.</li>\n<li>payload (dict): the details on this callback's invocation.<br/>\nThis dictionary has the following keys:\n<ul>\n<li>config: the name of the selected scenario configuration.</li>\n<li>date: the creation date for the new scenario.</li>\n<li>label: the user-specified label.</li>\n<li>properties: a dictionary containing all the user-defined custom properties.</li>\n</ul>\n</li>\n<li>The callback function can return a scenario, a string containing an error message (a scenario will not be created), or None (then a new scenario is created with the user parameters).</li>\n</ul>",
+                        "signature": [
+                            [
+                                "state",
+                                "State"
+                            ],
+                            [
+                                "id",
+                                "str"
+                            ],
+                            [
+                                "payload",
+                                "dict"
+                            ]
+                        ]
                     },
                     {
                         "name": "show_dialog",
                         "type": "bool",
                         "default_value": "True",
                         "doc": "If True, a dialog is shown when the user click on the 'Add scenario' button."
+                    },
+                    {
+                        "name": "scenarios",
+                        "type": "dynamic(list[Scenario|Cycle])",
+                        "doc": "TODO: The list of Scenario/Cycle to show. Shows all Cycle/Scenario if value is None."
+                    },
+                    {
+                        "name": "multiple",
+                        "type": "bool",
+                        "default_value": "False",
+                        "doc": "TODO: If True, the user can select multiple scenarios."
                     }
                 ]
             }
@@ -166,7 +190,20 @@
                         "name": "on_submission_change",
                         "type": "Callback",
                         "doc": "The name of the function that is triggered when a submission status is changed.<br/><br/>All the parameters of that function are optional:\n<ul>\n<li>state (<code>State^</code>): the state instance.</li>\n<li>submittable (Submittable): the entity (usually a Scenario) that was submitted.</li>\n<li>details (dict): the details on this callback's invocation.<br/>\nThis dictionary has the following keys:\n<ul>\n<li>submission_status (str): the new status of the submission (possible values: SUBMITTED, COMPLETED, CANCELED, FAILED, BLOCKED, WAITING, RUNNING).</li>\n<li>job: the Job (if any) that is at the origin of the submission status change.</li>\n</ul>",
-                        "signature": [["state", "State"], ["submittable", "Submittable"], ["details", "dict"]]
+                        "signature": [
+                            [
+                                "state",
+                                "State"
+                            ],
+                            [
+                                "submittable",
+                                "Submittable"
+                            ],
+                            [
+                                "details",
+                                "dict"
+                            ]
+                        ]
                     }
                 ]
             }
@@ -212,7 +249,16 @@
                         "name": "on_action",
                         "type": "Callback",
                         "doc": "The name of the function that is triggered when a a node is selected.<br/><br/>All the parameters of that function are optional:\n<ul>\n<li>state (<code>State^</code>): the state instance.</li>\n<li>entity (DataNode | Task): the entity (DataNode or Task) that was selected.</li>\n</ul>",
-                        "signature": [["state", "State"], ["entity", "Task | DataNode"]]
+                        "signature": [
+                            [
+                                "state",
+                                "State"
+                            ],
+                            [
+                                "entity",
+                                "Task | DataNode"
+                            ]
+                        ]
                     }
                 ]
             }
@@ -275,8 +321,14 @@
                     },
                     {
                         "name": "scenario",
-                        "type": "dynamic(Scenario)",
-                        "doc": "If the <code>Scenario^</code> is set, the selector will only show datanodes owned by this scenario."
+                        "type": "dynamic(Scenario|list[Scenario])",
+                        "doc": "TODO: If the <code>Scenario^</code> is set, the selector will only show datanodes owned by this scenario."
+                    },
+                    {
+                        "name": "multiple",
+                        "type": "bool",
+                        "default_value": "False",
+                        "doc": "TODO: If True, the user can select multiple datanodes."
                     }
                 ]
             }

+ 2 - 3
tests/core/_orchestrator/_dispatcher/test_dispatcher__needs_to_run.py

@@ -77,9 +77,8 @@ def test_need_to_run_skippable_task_with_validity_period_on_output():
 
     assert dispatcher._needs_to_run(task)  # output data is not edited
 
-    output_edit_time = datetime.now()  # edit time
-    with freezegun.freeze_time(output_edit_time):
-        task.output["output"].write("Hello world !")  # output data is edited
+    task.output["output"].write("Hello world !")  # output data is edited
+    output_edit_time = task.output["output"].last_edit_date
 
     with freezegun.freeze_time(output_edit_time + timedelta(minutes=30)):  # 30 min after edit time
         assert not dispatcher._needs_to_run(task)  # output data is written and validity period not expired

+ 7 - 4
tests/core/_orchestrator/test_orchestrator__submit.py

@@ -8,6 +8,7 @@
 # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
+
 from datetime import datetime, timedelta
 from unittest import mock
 
@@ -62,9 +63,9 @@ def test_submit_scenario_development_mode():
 
     # data nodes should have been written (except the input dn_0)
     assert scenario.dn_0.last_edit_date < submit_time
-    assert scenario.dn_1.last_edit_date == submit_time
-    assert scenario.dn_2.last_edit_date == submit_time
-    assert scenario.dn_3.last_edit_date == submit_time
+    assert scenario.dn_1.last_edit_date is not None
+    assert scenario.dn_2.last_edit_date is not None
+    assert scenario.dn_3.last_edit_date is not None
 
     # jobs are created in a specific order and are correct
     assert len(jobs) == 4
@@ -339,7 +340,9 @@ def test_submit_sequence_development_mode():
 
     # data nodes should have been written (except the input dn_0)
     assert sce.dn_0.last_edit_date < submit_time
-    assert sce.dn_1.last_edit_date == submit_time == sce.dn_2.last_edit_date == sce.dn_3.last_edit_date
+    assert sce.dn_1.last_edit_date is not None
+    assert sce.dn_2.last_edit_date is not None
+    assert sce.dn_3.last_edit_date is not None
 
     # jobs are created in a specific order and are correct
     assert len(jobs) == 3

+ 4 - 3
tests/core/_orchestrator/test_orchestrator__submit_task.py

@@ -8,7 +8,8 @@
 # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
-from datetime import datetime
+
+from datetime import datetime, timedelta
 from unittest import mock
 
 import freezegun
@@ -46,7 +47,7 @@ def test_submit_task_development_mode():
     scenario = create_scenario()
     orchestrator = _OrchestratorFactory._build_orchestrator()
 
-    submit_time = datetime.now()
+    submit_time = datetime.now() + timedelta(seconds=1)  # +1 to ensure the edit time of dn_0 is before the submit time
     with freezegun.freeze_time(submit_time):
         submission = orchestrator.submit_task(
             scenario.t1, no_of_retry=10, log=True, log_file="file_path"
@@ -54,7 +55,7 @@ def test_submit_task_development_mode():
         job = submission.jobs[0]
 
     # task output should have been written
-    assert scenario.dn_1.last_edit_date == submit_time
+    assert scenario.dn_1.last_edit_date is not None
 
     # job exists and is correct
     assert job.task == scenario.t1

+ 220 - 0
tests/core/config/test_config_schema.py

@@ -0,0 +1,220 @@
+# Copyright 2021-2024 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import json
+
+import pytest
+from jsonschema import ValidationError, validate
+
+with open("taipy/core/config/config.schema.json", "r") as f:
+    json_schema = json.load(f)
+
+
+def test_validate_generic_datanode_config():
+    generic_cfg_without_read_write_fct = {"DATA_NODE": {"properties": {"storage_type": "generic"}}}
+    with pytest.raises(ValidationError):
+        validate(generic_cfg_without_read_write_fct, json_schema)
+
+    generic_cfg_without_write_fct = {
+        "DATA_NODE": {"properties": {"storage_type": "generic", "read_fct": "module.read_fct"}}
+    }
+    with pytest.raises(ValidationError):
+        validate(generic_cfg_without_write_fct, json_schema)
+
+    generic_cfg_without_read_fct = {
+        "DATA_NODE": {"properties": {"storage_type": "generic", "write_fct": "module.write_fct"}}
+    }
+    with pytest.raises(ValidationError):
+        validate(generic_cfg_without_read_fct, json_schema)
+
+    generic_cfg_with_read_write_fct = {
+        "DATA_NODE": {
+            "properties": {"storage_type": "generic", "read_fct": "module.read_fct", "write_fct": "module.write_fct"}
+        }
+    }
+    validate(generic_cfg_with_read_write_fct, json_schema)
+
+
+def test_validate_sql_datanode_config():
+    sql_cfg_sqlite_without_required_properties = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "sql",
+                "db_engine": "sqlite",
+            }
+        }
+    }
+    with pytest.raises(ValidationError):
+        validate(sql_cfg_sqlite_without_required_properties, json_schema)
+
+    sql_cfg_sqlite = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "sql",
+                "db_engine": "sqlite",
+                "db_name": "name",
+                "read_query": "SELECT * FROM table",
+                "write_query_builder": "module.write_query_builder",
+            }
+        }
+    }
+    validate(sql_cfg_sqlite, json_schema)
+
+    sql_cfg_notsqlite_without_required_properties = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "sql",
+                "db_engine": "mysql",
+            }
+        }
+    }
+    with pytest.raises(ValidationError):
+        validate(sql_cfg_notsqlite_without_required_properties, json_schema)
+
+    sql_cfg_notsqlite_without_username_password = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "sql",
+                "db_engine": "mysql",
+                "db_name": "name",
+                "read_query": "SELECT * FROM table",
+                "write_query_builder": "module.write_query_builder",
+            }
+        }
+    }
+    with pytest.raises(ValidationError):
+        validate(sql_cfg_notsqlite_without_username_password, json_schema)
+
+    sql_cfg_notsqlite = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "sql",
+                "db_engine": "mysql",
+                "db_name": "name",
+                "db_username": "user",
+                "db_password": "pass",
+                "read_query": "SELECT * FROM table",
+                "write_query_builder": "module.write_query_builder",
+            }
+        }
+    }
+    validate(sql_cfg_notsqlite, json_schema)
+
+
+def test_validate_sql_table_datanode_config():
+    sql_table_cfg_sqlite_without_required_properties = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "sql_table",
+                "db_engine": "sqlite",
+            }
+        }
+    }
+    with pytest.raises(ValidationError):
+        validate(sql_table_cfg_sqlite_without_required_properties, json_schema)
+
+    sql_table_cfg_sqlite = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "sql_table",
+                "db_engine": "sqlite",
+                "db_name": "name",
+                "table_name": "table",
+            }
+        }
+    }
+    validate(sql_table_cfg_sqlite, json_schema)
+
+    sql_table_cfg_notsqlite_without_required_properties = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "sql_table",
+                "db_engine": "mysql",
+            }
+        }
+    }
+    with pytest.raises(ValidationError):
+        validate(sql_table_cfg_notsqlite_without_required_properties, json_schema)
+
+    sql_table_cfg_notsqlite_without_username_password = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "sql_table",
+                "db_engine": "mysql",
+                "db_name": "name",
+                "table_name": "table",
+            }
+        }
+    }
+    with pytest.raises(ValidationError):
+        validate(sql_table_cfg_notsqlite_without_username_password, json_schema)
+
+    sql_table_cfg_notsqlite = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "sql_table",
+                "db_engine": "mysql",
+                "db_name": "name",
+                "db_username": "user",
+                "db_password": "pass",
+                "table_name": "table",
+            }
+        }
+    }
+    validate(sql_table_cfg_notsqlite, json_schema)
+
+
+def test_validate_mongo_collection_datanode_config():
+    mongo_collection_cfg_without_required_properties = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "mongo_collection",
+            }
+        }
+    }
+    with pytest.raises(ValidationError):
+        validate(mongo_collection_cfg_without_required_properties, json_schema)
+
+    mongo_collection_cfg = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "mongo_collection",
+                "db_name": "name",
+                "collection_name": "collection",
+            }
+        }
+    }
+    validate(mongo_collection_cfg, json_schema)
+
+
+def test_validate_s3_object_datanode_config():
+    s3_object_cfg_without_required_properties = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "s3_object",
+            }
+        }
+    }
+    with pytest.raises(ValidationError):
+        validate(s3_object_cfg_without_required_properties, json_schema)
+
+    s3_object_cfg = {
+        "DATA_NODE": {
+            "properties": {
+                "storage_type": "s3_object",
+                "aws_access_key": "access_key",
+                "aws_secret_access_key": "secret_access_key",
+                "aws_s3_bucket_name": "bucket",
+                "aws_s3_object_key": "object_key",
+            }
+        }
+    }
+    validate(s3_object_cfg, json_schema)

+ 21 - 15
tests/core/conftest.py

@@ -44,7 +44,7 @@ from taipy.core.cycle.cycle import Cycle
 from taipy.core.cycle.cycle_id import CycleId
 from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data._data_model import _DataNodeModel
-from taipy.core.data.in_memory import InMemoryDataNode
+from taipy.core.data.in_memory import DataNodeId, InMemoryDataNode
 from taipy.core.job._job_manager_factory import _JobManagerFactory
 from taipy.core.job.job import Job
 from taipy.core.job.job_id import JobId
@@ -59,7 +59,7 @@ from taipy.core.sequence.sequence_id import SequenceId
 from taipy.core.submission._submission_manager_factory import _SubmissionManagerFactory
 from taipy.core.submission.submission import Submission
 from taipy.core.task._task_manager_factory import _TaskManagerFactory
-from taipy.core.task.task import Task
+from taipy.core.task.task import Task, TaskId
 
 current_time = datetime.now()
 
@@ -176,15 +176,6 @@ def default_multi_sheet_data_frame():
     }
 
 
-@pytest.fixture(scope="session", autouse=True)
-def cleanup_files():
-    yield
-
-    for path in [".data", ".my_data", "user_data", ".taipy"]:
-        if os.path.exists(path):
-            shutil.rmtree(path, ignore_errors=True)
-
-
 @pytest.fixture(scope="function")
 def current_datetime():
     return current_time
@@ -197,7 +188,7 @@ def scenario(cycle):
         set(),
         {},
         set(),
-        ScenarioId("sc_id"),
+        ScenarioId("SCENARIO_scenario_id"),
         current_time,
         is_primary=False,
         tags={"foo"},
@@ -208,7 +199,9 @@ def scenario(cycle):
 
 @pytest.fixture(scope="function")
 def data_node():
-    return InMemoryDataNode("data_node_config_id", Scope.SCENARIO, version="random_version_number")
+    return InMemoryDataNode(
+        "data_node_config_id", Scope.SCENARIO, version="random_version_number", id=DataNodeId("DATANODE_data_node_id")
+    )
 
 
 @pytest.fixture(scope="function")
@@ -234,7 +227,7 @@ def data_node_model():
 @pytest.fixture(scope="function")
 def task(data_node):
     dn = InMemoryDataNode("dn_config_id", Scope.SCENARIO, version="random_version_number")
-    return Task("task_config_id", {}, print, [data_node], [dn])
+    return Task("task_config_id", {}, print, [data_node], [dn], TaskId("TASK_task_id"))
 
 
 @pytest.fixture(scope="function")
@@ -264,7 +257,7 @@ def cycle():
         start_date=example_date,
         end_date=example_date,
         name="cc",
-        id=CycleId("cc_id"),
+        id=CycleId("CYCLE_cycle_id"),
     )
 
 
@@ -314,6 +307,19 @@ def tmp_sqlite(tmpdir_factory):
     return os.path.join(fn.strpath, "test.db")
 
 
+@pytest.fixture(scope="session", autouse=True)
+def cleanup_files():
+    for path in [".data", ".my_data", "user_data", ".taipy"]:
+        if os.path.exists(path):
+            shutil.rmtree(path, ignore_errors=True)
+
+    yield
+
+    for path in [".data", ".my_data", "user_data", ".taipy"]:
+        if os.path.exists(path):
+            shutil.rmtree(path, ignore_errors=True)
+
+
 @pytest.fixture(scope="function", autouse=True)
 def clean_repository(init_config, init_managers, init_orchestrator, init_notifier, clean_argparser):
     clean_argparser()

+ 17 - 0
tests/core/cycle/test_cycle.py

@@ -14,7 +14,24 @@ from datetime import timedelta
 from taipy.config.common.frequency import Frequency
 from taipy.core import CycleId
 from taipy.core.cycle._cycle_manager import _CycleManager
+from taipy.core.cycle._cycle_manager_factory import _CycleManagerFactory
 from taipy.core.cycle.cycle import Cycle
+from taipy.core.task.task import Task
+
+
+def test_cycle_equals(cycle):
+    cycle_manager = _CycleManagerFactory()._build_manager()
+
+    cycle_id = cycle.id
+    cycle_manager._set(cycle)
+
+    # To test if instance is same type
+    task = Task("task", {}, print, [], [], cycle_id)
+
+    cycle_2 = cycle_manager._get(cycle_id)
+    assert cycle == cycle_2
+    assert cycle != cycle_id
+    assert cycle != task
 
 
 def test_create_cycle_entity(current_datetime):

+ 4 - 2
tests/core/data/test_csv_data_node.py

@@ -11,6 +11,7 @@
 
 import os
 import pathlib
+import uuid
 from datetime import datetime
 from time import sleep
 
@@ -102,7 +103,8 @@ class TestCSVDataNode:
         ],
     )
     def test_create_with_default_data(self, properties, exists):
-        dn = CSVDataNode("foo", Scope.SCENARIO, DataNodeId("dn_id"), properties=properties)
+        dn = CSVDataNode("foo", Scope.SCENARIO, DataNodeId(f"dn_id_{uuid.uuid4()}"), properties=properties)
+        assert dn.path == os.path.join(Config.core.storage_folder.strip("/"), "csvs", dn.id + ".csv")
         assert os.path.exists(dn.path) is exists
 
     def test_set_path(self):
@@ -165,5 +167,5 @@ class TestCSVDataNode:
 
         dn = CSVDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
 
-        assert ".data" not in dn.path.name
+        assert ".data" not in dn.path
         assert os.path.exists(dn.path)

+ 87 - 49
tests/core/data/test_data_manager.py

@@ -8,6 +8,7 @@
 # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
+
 import os
 import pathlib
 
@@ -77,11 +78,14 @@ class TestDataManager:
         assert _DataManager._get(csv_dn.id).job_ids == csv_dn.job_ids
         assert not _DataManager._get(csv_dn.id).is_ready_for_reading
         assert _DataManager._get(csv_dn.id).is_ready_for_reading == csv_dn.is_ready_for_reading
-        assert len(_DataManager._get(csv_dn.id).properties) == 4
+        assert (
+            len(_DataManager._get(csv_dn.id).properties) == 5
+        )  # path, encoding, has_header, exposed_type, is_generated
         assert _DataManager._get(csv_dn.id).properties.get("path") == "bar"
         assert _DataManager._get(csv_dn.id).properties.get("encoding") == "utf-8"
         assert _DataManager._get(csv_dn.id).properties.get("has_header") is True
         assert _DataManager._get(csv_dn.id).properties.get("exposed_type") == "pandas"
+        assert _DataManager._get(csv_dn.id).properties.get("is_generated") is False
         assert _DataManager._get(csv_dn.id).properties == csv_dn.properties
         assert _DataManager._get(csv_dn.id).edit_in_progress is False
         assert _DataManager._get(csv_dn.id)._editor_id is None
@@ -103,11 +107,12 @@ class TestDataManager:
         assert _DataManager._get(csv_dn).job_ids == csv_dn.job_ids
         assert not _DataManager._get(csv_dn).is_ready_for_reading
         assert _DataManager._get(csv_dn).is_ready_for_reading == csv_dn.is_ready_for_reading
-        assert len(_DataManager._get(csv_dn).properties) == 4
+        assert len(_DataManager._get(csv_dn).properties) == 5  # path, encoding, has_header, exposed_type, is_generated
         assert _DataManager._get(csv_dn).properties.get("path") == "bar"
         assert _DataManager._get(csv_dn).properties.get("encoding") == "utf-8"
         assert _DataManager._get(csv_dn).properties.get("has_header") is True
         assert _DataManager._get(csv_dn.id).properties.get("exposed_type") == "pandas"
+        assert _DataManager._get(csv_dn.id).properties.get("is_generated") is False
         assert _DataManager._get(csv_dn).properties == csv_dn.properties
         assert _DataManager._get(csv_dn.id).edit_in_progress is False
         assert _DataManager._get(csv_dn.id)._editor_id is None
@@ -118,8 +123,9 @@ class TestDataManager:
         dn = _DataManager._create_and_set(config, None, None)
 
         assert _DataManager._get(dn.id).last_edit_date is None
-        assert len(_DataManager._get(dn.id).properties) == 1
-        assert _DataManager._get(dn.id).properties.get("is_generated")
+        assert len(_DataManager._get(dn.id).properties) == 2  # is_generated and path
+        assert isinstance(_DataManager._get(dn.id).properties.get("path"), str)
+        assert _DataManager._get(dn.id).properties.get("is_generated") is True
         assert not _DataManager._get(dn.id).edit_in_progress
         assert _DataManager._get(dn.id)._editor_id is None
         assert _DataManager._get(dn.id)._editor_expiration_date is None
@@ -127,8 +133,9 @@ class TestDataManager:
         dn.lock_edit("foo")
 
         assert _DataManager._get(dn.id).last_edit_date is None
-        assert len(_DataManager._get(dn.id).properties) == 1
-        assert _DataManager._get(dn.id).properties.get("is_generated")
+        assert len(_DataManager._get(dn.id).properties) == 2  # is_generated and path
+        assert isinstance(_DataManager._get(dn.id).properties.get("path"), str)
+        assert _DataManager._get(dn.id).properties.get("is_generated") is True
         assert _DataManager._get(dn.id).edit_in_progress
         assert _DataManager._get(dn.id).editor_id == "foo"
         assert _DataManager._get(dn.id).editor_expiration_date is not None
@@ -136,8 +143,9 @@ class TestDataManager:
         dn.unlock_edit("foo")
 
         assert _DataManager._get(dn.id).last_edit_date is None
-        assert len(_DataManager._get(dn.id).properties) == 1
-        assert _DataManager._get(dn.id).properties.get("is_generated")
+        assert len(_DataManager._get(dn.id).properties) == 2  # is_generated and path
+        assert isinstance(_DataManager._get(dn.id).properties.get("path"), str)
+        assert _DataManager._get(dn.id).properties.get("is_generated") is True
         assert not _DataManager._get(dn.id).edit_in_progress
         assert _DataManager._get(dn.id).editor_id is None
         assert _DataManager._get(dn.id).editor_expiration_date is None
@@ -226,7 +234,7 @@ class TestDataManager:
         assert _DataManager._get(pickle_dn.id).job_ids == pickle_dn.job_ids
         assert not _DataManager._get(pickle_dn.id).is_ready_for_reading
         assert _DataManager._get(pickle_dn.id).is_ready_for_reading == pickle_dn.is_ready_for_reading
-        assert len(_DataManager._get(pickle_dn.id).properties) == 1
+        assert len(_DataManager._get(pickle_dn.id).properties) == 2  # is_generated and path
         assert _DataManager._get(pickle_dn.id).properties == pickle_dn.properties
 
         assert _DataManager._get(pickle_dn) is not None
@@ -245,7 +253,7 @@ class TestDataManager:
         assert _DataManager._get(pickle_dn).job_ids == pickle_dn.job_ids
         assert not _DataManager._get(pickle_dn).is_ready_for_reading
         assert _DataManager._get(pickle_dn).is_ready_for_reading == pickle_dn.is_ready_for_reading
-        assert len(_DataManager._get(pickle_dn).properties) == 1
+        assert len(_DataManager._get(pickle_dn).properties) == 2  # is_generated and path
         assert _DataManager._get(pickle_dn).properties == pickle_dn.properties
 
     def test_create_raises_exception_with_wrong_type(self):
@@ -459,59 +467,89 @@ class TestDataManager:
 
         dm._delete_all()
 
-    def test_clean_generated_pickle_files(self, pickle_file_path):
-        user_pickle_dn_config = Config.configure_data_node(
-            id="d1", storage_type="pickle", path=pickle_file_path, default_data="d"
+    @pytest.mark.parametrize(
+        "storage_type,path",
+        [
+            ("pickle", "pickle_file_path"),
+            ("csv", "csv_file"),
+            ("excel", "excel_file"),
+            ("json", "json_file"),
+            ("parquet", "parquet_file_path"),
+        ],
+    )
+    def test_clean_generated_files(self, storage_type, path, request):
+        path = request.getfixturevalue(path)
+        user_dn_config = Config.configure_data_node(
+            id="d1", storage_type=storage_type, path=path, default_data={"a": [1], "b": [2]}
         )
-        generated_pickle_dn_1_config = Config.configure_data_node(id="d2", storage_type="pickle", default_data="d")
-        generated_pickle_dn_2_config = Config.configure_data_node(id="d3", storage_type="pickle", default_data="d")
-
-        dns = _DataManager._bulk_get_or_create(
-            [user_pickle_dn_config, generated_pickle_dn_1_config, generated_pickle_dn_2_config]
+        generated_dn_1_config = Config.configure_data_node(
+            id="d2", storage_type=storage_type, default_data={"a": [1], "b": [2]}
+        )
+        generated_dn_2_config = Config.configure_data_node(
+            id="d3", storage_type=storage_type, default_data={"a": [1], "b": [2]}
         )
 
-        user_pickle_dn = dns[user_pickle_dn_config]
-        generated_pickle_dn_1 = dns[generated_pickle_dn_1_config]
-        generated_pickle_dn_2 = dns[generated_pickle_dn_2_config]
-
-        _DataManager._clean_pickle_file(user_pickle_dn.id)
-        assert file_exists(user_pickle_dn.path)
-
-        _DataManager._clean_pickle_files([generated_pickle_dn_1, generated_pickle_dn_2])
-        assert not file_exists(generated_pickle_dn_1.path)
-        assert not file_exists(generated_pickle_dn_2.path)
-
-    def test_delete_does_clean_generated_pickle_files(self, pickle_file_path):
-        user_pickle_dn_config = Config.configure_data_node(
-            id="d1", storage_type="pickle", path=pickle_file_path, default_data="d"
+        dns = _DataManager._bulk_get_or_create([user_dn_config, generated_dn_1_config, generated_dn_2_config])
+
+        user_dn = dns[user_dn_config]
+        generated_dn_1 = dns[generated_dn_1_config]
+        generated_dn_2 = dns[generated_dn_2_config]
+
+        _DataManager._clean_generated_file(user_dn.id)
+        assert file_exists(user_dn.path)
+
+        _DataManager._clean_generated_files([generated_dn_1, generated_dn_2])
+        assert not file_exists(generated_dn_1.path)
+        assert not file_exists(generated_dn_2.path)
+
+    @pytest.mark.parametrize(
+        "storage_type,path",
+        [
+            ("pickle", "pickle_file_path"),
+            ("csv", "csv_file"),
+            ("excel", "excel_file"),
+            ("json", "json_file"),
+            ("parquet", "parquet_file_path"),
+        ],
+    )
+    def test_delete_does_clean_generated_pickle_files(self, storage_type, path, request):
+        path = request.getfixturevalue(path)
+        user_dn_config = Config.configure_data_node(
+            id="d1", storage_type=storage_type, path=path, default_data={"a": [1], "b": [2]}
+        )
+        generated_dn_config_1 = Config.configure_data_node(
+            id="d2", storage_type=storage_type, default_data={"a": [1], "b": [2]}
+        )
+        generated_dn_config_2 = Config.configure_data_node(
+            id="d3", storage_type=storage_type, default_data={"a": [1], "b": [2]}
+        )
+        generated_dn_config_3 = Config.configure_data_node(
+            id="d4", storage_type=storage_type, default_data={"a": [1], "b": [2]}
         )
-        generated_pickle_dn_config_1 = Config.configure_data_node(id="d2", storage_type="pickle", default_data="d")
-        generated_pickle_dn_config_2 = Config.configure_data_node(id="d3", storage_type="pickle", default_data="d")
-        generated_pickle_dn_config_3 = Config.configure_data_node(id="d4", storage_type="pickle", default_data="d")
 
         dns = _DataManager._bulk_get_or_create(
             [
-                user_pickle_dn_config,
-                generated_pickle_dn_config_1,
-                generated_pickle_dn_config_2,
-                generated_pickle_dn_config_3,
+                user_dn_config,
+                generated_dn_config_1,
+                generated_dn_config_2,
+                generated_dn_config_3,
             ]
         )
 
-        user_pickle_dn = dns[user_pickle_dn_config]
-        generated_pickle_dn_1 = dns[generated_pickle_dn_config_1]
-        generated_pickle_dn_2 = dns[generated_pickle_dn_config_2]
-        generated_pickle_dn_3 = dns[generated_pickle_dn_config_3]
+        user_dn = dns[user_dn_config]
+        generated_dn_1 = dns[generated_dn_config_1]
+        generated_dn_2 = dns[generated_dn_config_2]
+        generated_dn_3 = dns[generated_dn_config_3]
 
-        _DataManager._delete(user_pickle_dn.id)
-        assert file_exists(user_pickle_dn.path)
+        _DataManager._delete(user_dn.id)
+        assert file_exists(user_dn.path)
 
-        _DataManager._delete_many([generated_pickle_dn_1.id, generated_pickle_dn_2.id])
-        assert not file_exists(generated_pickle_dn_1.path)
-        assert not file_exists(generated_pickle_dn_2.path)
+        _DataManager._delete_many([generated_dn_1.id, generated_dn_2.id])
+        assert not file_exists(generated_dn_1.path)
+        assert not file_exists(generated_dn_2.path)
 
         _DataManager._delete_all()
-        assert not file_exists(generated_pickle_dn_3.path)
+        assert not file_exists(generated_dn_3.path)
 
     def test_create_dn_from_loaded_config_no_scope(self):
         file_config = NamedTemporaryFile(

+ 16 - 0
tests/core/data/test_data_node.py

@@ -21,11 +21,13 @@ from taipy.config import Config
 from taipy.config.common.scope import Scope
 from taipy.config.exceptions.exceptions import InvalidConfigurationId
 from taipy.core.data._data_manager import _DataManager
+from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data.data_node import DataNode
 from taipy.core.data.data_node_id import DataNodeId
 from taipy.core.data.in_memory import InMemoryDataNode
 from taipy.core.exceptions.exceptions import DataNodeIsBeingEdited, NoData
 from taipy.core.job.job_id import JobId
+from taipy.core.task.task import Task
 
 from .utils import FakeDataNode
 
@@ -46,6 +48,20 @@ def funct_b_d(input: str):
 
 
 class TestDataNode:
+    def test_dn_equals(self, data_node):
+        data_manager = _DataManagerFactory()._build_manager()
+
+        dn_id = data_node.id
+        data_manager._set(data_node)
+
+        # # To test if instance is same type
+        task = Task("task", {}, print, [], [], dn_id)
+
+        dn_2 = data_manager._get(dn_id)
+        assert data_node == dn_2
+        assert data_node != dn_id
+        assert data_node != task
+
     def test_create_with_default_values(self):
         dn = DataNode("foo_bar")
         assert dn.config_id == "foo_bar"

+ 5 - 3
tests/core/data/test_excel_data_node.py

@@ -11,6 +11,7 @@
 
 import os
 import pathlib
+import uuid
 from datetime import datetime
 from time import sleep
 from typing import Dict
@@ -132,12 +133,13 @@ class TestExcelDataNode:
     @pytest.mark.parametrize(
         ["properties", "exists"],
         [
-            ({}, False),
             ({"default_data": {"a": ["foo", "bar"]}}, True),
+            ({}, False),
         ],
     )
     def test_create_with_default_data(self, properties, exists):
-        dn = ExcelDataNode("foo", Scope.SCENARIO, DataNodeId("dn_id"), properties=properties)
+        dn = ExcelDataNode("foo", Scope.SCENARIO, DataNodeId(f"dn_id_{uuid.uuid4()}"), properties=properties)
+        assert dn.path == os.path.join(Config.core.storage_folder.strip("/"), "excels", dn.id + ".xlsx")
         assert os.path.exists(dn.path) is exists
 
     def test_read_write_after_modify_path(self):
@@ -361,5 +363,5 @@ class TestExcelDataNode:
 
         dn = ExcelDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
 
-        assert ".data" not in dn.path.name
+        assert ".data" not in dn.path
         assert os.path.exists(dn.path)

+ 5 - 3
tests/core/data/test_json_data_node.py

@@ -13,6 +13,7 @@ import datetime
 import json
 import os
 import pathlib
+import uuid
 from dataclasses import dataclass
 from enum import Enum
 from time import sleep
@@ -308,12 +309,13 @@ class TestJSONDataNode:
     @pytest.mark.parametrize(
         ["properties", "exists"],
         [
-            ({}, False),
             ({"default_data": {"foo": "bar"}}, True),
+            ({}, False),
         ],
     )
     def test_create_with_default_data(self, properties, exists):
-        dn = JSONDataNode("foo", Scope.SCENARIO, DataNodeId("dn_id"), properties=properties)
+        dn = JSONDataNode("foo", Scope.SCENARIO, DataNodeId(f"dn_id_{uuid.uuid4()}"), properties=properties)
+        assert dn.path == os.path.join(Config.core.storage_folder.strip("/"), "jsons", dn.id + ".json")
         assert os.path.exists(dn.path) is exists
 
     def test_set_path(self):
@@ -366,5 +368,5 @@ class TestJSONDataNode:
 
         dn = JSONDataNode("foo", Scope.SCENARIO, properties={"path": path})
 
-        assert ".data" not in dn.path.name
+        assert ".data" not in dn.path
         assert os.path.exists(dn.path)

+ 4 - 2
tests/core/data/test_parquet_data_node.py

@@ -11,6 +11,7 @@
 
 import os
 import pathlib
+import uuid
 from datetime import datetime
 from importlib import util
 from time import sleep
@@ -127,7 +128,8 @@ class TestParquetDataNode:
         ],
     )
     def test_create_with_default_data(self, properties, exists):
-        dn = ParquetDataNode("foo", Scope.SCENARIO, DataNodeId("dn_id"), properties=properties)
+        dn = ParquetDataNode("foo", Scope.SCENARIO, DataNodeId(f"dn_id_{uuid.uuid4()}"), properties=properties)
+        assert dn.path == os.path.join(Config.core.storage_folder.strip("/"), "parquets", dn.id + ".parquet")
         assert os.path.exists(dn.path) is exists
 
     @pytest.mark.parametrize("engine", __engine)
@@ -217,5 +219,5 @@ class TestParquetDataNode:
 
         dn = ParquetDataNode("foo_bar", Scope.SCENARIO, properties={"path": path, "name": "super name"})
 
-        assert ".data" not in dn.path.name
+        assert ".data" not in dn.path
         assert os.path.exists(dn.path)

+ 2 - 2
tests/core/data/test_pickle_data_node.py

@@ -44,7 +44,7 @@ class TestPickleDataNodeEntity:
 
     def test_create(self):
         dn = PickleDataNode("foobar_bazxyxea", Scope.SCENARIO, properties={"default_data": "Data"})
-        assert os.path.isfile(Config.core.storage_folder + "pickles/" + dn.id + ".p")
+        assert os.path.isfile(os.path.join(Config.core.storage_folder.strip("/"), "pickles", dn.id + ".p"))
         assert isinstance(dn, PickleDataNode)
         assert dn.storage_type() == "pickle"
         assert dn.config_id == "foobar_bazxyxea"
@@ -190,5 +190,5 @@ class TestPickleDataNodeEntity:
 
         dn = PickleDataNode("foo", Scope.SCENARIO, properties={"default_data": "bar", "path": path})
 
-        assert ".data" not in dn.path.name
+        assert ".data" not in dn.path
         assert os.path.exists(dn.path)

+ 18 - 0
tests/core/job/test_job.py

@@ -27,11 +27,13 @@ from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
 from taipy.core.config.job_config import JobConfig
 from taipy.core.data.in_memory import InMemoryDataNode
 from taipy.core.job._job_manager import _JobManager
+from taipy.core.job._job_manager_factory import _JobManagerFactory
 from taipy.core.job.job import Job
 from taipy.core.job.status import Status
 from taipy.core.scenario.scenario import Scenario
 from taipy.core.submission._submission_manager_factory import _SubmissionManagerFactory
 from taipy.core.task._task_manager import _TaskManager
+from taipy.core.task._task_manager_factory import _TaskManagerFactory
 from taipy.core.task.task import Task
 
 
@@ -83,6 +85,22 @@ def _error():
     raise RuntimeError("Something bad has happened")
 
 
+def test_job_equals(job):
+    _TaskManagerFactory._build_manager()._set(job.task)
+    job_manager = _JobManagerFactory()._build_manager()
+
+    job_id = job.id
+    job_manager._set(job)
+
+    # To test if instance is same type
+    task = Task("task", {}, print, [], [], job_id)
+
+    job_2 = job_manager._get(job_id)
+    assert job == job_2
+    assert job != job_id
+    assert job != task
+
+
 def test_create_job(scenario, task, job):
     from taipy.core.scenario._scenario_manager_factory import _ScenarioManagerFactory
 

+ 15 - 0
tests/core/scenario/test_scenario.py

@@ -32,6 +32,21 @@ from taipy.core.task._task_manager_factory import _TaskManagerFactory
 from taipy.core.task.task import Task, TaskId
 
 
+def test_scenario_equals(scenario):
+    scenario_manager = _ScenarioManagerFactory()._build_manager()
+
+    scenario_id = scenario.id
+    scenario_manager._set(scenario)
+
+    # To test if instance is same type
+    task = Task("task", {}, print, [], [], scenario_id)
+
+    scenario_2 = scenario_manager._get(scenario_id)
+    assert scenario == scenario_2
+    assert scenario != scenario_id
+    assert scenario != task
+
+
 def test_create_primary_scenario(cycle):
     scenario = Scenario("foo", set(), {"key": "value"}, is_primary=True, cycle=cycle)
     assert scenario.id is not None

+ 72 - 0
tests/core/scenario/test_scenario_manager.py

@@ -19,6 +19,7 @@ from taipy.config.common.frequency import Frequency
 from taipy.config.common.scope import Scope
 from taipy.config.config import Config
 from taipy.core import Job
+from taipy.core import taipy as tp
 from taipy.core._orchestrator._orchestrator import _Orchestrator
 from taipy.core._version._version_manager import _VersionManager
 from taipy.core.common import _utils
@@ -813,6 +814,77 @@ def test_get_set_primary_scenario():
     assert _ScenarioManager._get_primary(cycle_1) == scenario_2
 
 
+def test_get_primary_scenarios_sorted():
+    scenario_1_cfg = Config.configure_scenario(id="scenario_1", frequency=Frequency.DAILY)
+    scenario_2_cfg = Config.configure_scenario(id="scenario_2", frequency=Frequency.DAILY)
+
+    not_primary_scenario = _ScenarioManager._create(scenario_1_cfg, name="not_primary_scenario")
+    now = datetime.now()
+    scenario_1 = _ScenarioManager._create(scenario_1_cfg, now, "B_scenario")
+    scenario_2 = _ScenarioManager._create(scenario_2_cfg, now + timedelta(days=2), "A_scenario")
+    scenario_3 = _ScenarioManager._create(scenario_2_cfg, now + timedelta(days=4), "C_scenario")
+    scenario_4 = _ScenarioManager._create(scenario_2_cfg, now + timedelta(days=3), "D_scenario")
+
+    _ScenarioManager._set_primary(scenario_1)
+    scenario_1.tags = ["banana", "kiwi"]
+    _ScenarioManager._set_primary(scenario_2)
+    scenario_2.tags = ["apple", "banana"]
+    _ScenarioManager._set_primary(scenario_3)
+    scenario_3.tags = ["banana", "kiwi"]
+    _ScenarioManager._set_primary(scenario_4)
+
+    all_scenarios = tp.get_scenarios()
+    assert not_primary_scenario in all_scenarios
+
+    primary_scenarios = _ScenarioManager._get_primary_scenarios()
+    assert not_primary_scenario not in primary_scenarios
+
+    primary_scenarios_sorted_by_name = [scenario_2, scenario_1, scenario_3, scenario_4]
+    assert primary_scenarios_sorted_by_name == _ScenarioManager._sort_scenarios(
+        primary_scenarios, descending=False, sort_key="name"
+    )
+
+    scenarios_with_same_config_id = [scenario_2, scenario_3, scenario_4]
+    scenarios_with_same_config_id.sort(key=lambda x: x.id)
+    primary_scenarios_sorted_by_config_id = [
+        scenario_1,
+        scenarios_with_same_config_id[0],
+        scenarios_with_same_config_id[1],
+        scenarios_with_same_config_id[2],
+    ]
+    assert primary_scenarios_sorted_by_config_id == _ScenarioManager._sort_scenarios(
+        primary_scenarios, descending=False, sort_key="config_id"
+    )
+
+    scenarios_sorted_by_id = [scenario_1, scenario_2, scenario_3, scenario_4]
+    scenarios_sorted_by_id.sort(key=lambda x: x.id)
+    assert scenarios_sorted_by_id == _ScenarioManager._sort_scenarios(
+        primary_scenarios, descending=False, sort_key="id"
+    )
+
+    primary_scenarios_sorted_by_creation_date = [scenario_1, scenario_2, scenario_4, scenario_3]
+    assert primary_scenarios_sorted_by_creation_date == _ScenarioManager._sort_scenarios(
+        primary_scenarios, descending=False, sort_key="creation_date"
+    )
+
+    scenarios_with_same_tags = [scenario_1, scenario_3]
+    scenarios_with_same_tags.sort(key=lambda x: x.id)
+    primary_scenarios_sorted_by_tags = [
+        scenario_4,
+        scenario_2,
+        scenarios_with_same_tags[0],
+        scenarios_with_same_tags[1],
+    ]
+    assert primary_scenarios_sorted_by_tags == _ScenarioManager._sort_scenarios(
+        primary_scenarios, descending=False, sort_key="tags"
+    )
+
+    primary_scenarios_sorted_by_name_descending_order = [scenario_4, scenario_3, scenario_1, scenario_2]
+    assert primary_scenarios_sorted_by_name_descending_order == _ScenarioManager._sort_scenarios(
+        primary_scenarios, descending=True, sort_key="name"
+    )
+
+
 def test_hard_delete_one_single_scenario_with_scenario_data_nodes():
     dn_input_config = Config.configure_data_node("my_input", "in_memory", scope=Scope.SCENARIO, default_data="testing")
     dn_output_config = Config.configure_data_node("my_output", "in_memory", scope=Scope.SCENARIO)

+ 20 - 0
tests/core/sequence/test_sequence.py

@@ -13,6 +13,7 @@ from unittest import mock
 
 import pytest
 
+from taipy.config import Config
 from taipy.config.common.scope import Scope
 from taipy.core.common._utils import _Subscriber
 from taipy.core.data._data_manager_factory import _DataManagerFactory
@@ -28,6 +29,25 @@ from taipy.core.task._task_manager import _TaskManager
 from taipy.core.task.task import Task, TaskId
 
 
+def test_sequence_equals():
+    task_config = Config.configure_task("mult_by_3", print, [], None)
+    scenario_config = Config.configure_scenario("scenario", [task_config])
+
+    scenario = _ScenarioManager._create(scenario_config)
+    scenario.add_sequences({"print": list(scenario.tasks.values())})
+    sequence_1 = scenario.sequences["print"]
+    sequence_id = sequence_1.id
+
+    assert sequence_1.name == "print"
+    sequence_2 = _SequenceManager._get(sequence_id)
+    # To test if instance is same type
+    task = Task("task", {}, print, [], [], sequence_id)
+
+    assert sequence_1 == sequence_2
+    assert sequence_1 != sequence_id
+    assert sequence_1 != task
+
+
 def test_create_sequence():
     input = InMemoryDataNode("foo", Scope.SCENARIO)
     output = InMemoryDataNode("bar", Scope.SCENARIO)

+ 15 - 0
tests/core/submission/test_submission.py

@@ -24,6 +24,21 @@ from taipy.core.task._task_manager_factory import _TaskManagerFactory
 from taipy.core.task.task import Task
 
 
+def test_submission_equals(submission):
+    submission_manager = _SubmissionManagerFactory()._build_manager()
+
+    submission_id = submission.id
+    submission_manager._set(submission)
+
+    # To test if instance is same type
+    task = Task("task", {}, print, [], [], submission_id)
+
+    submission_2 = submission_manager._get(submission_id)
+    assert submission == submission_2
+    assert submission != submission_id
+    assert submission != task
+
+
 def test_create_submission(scenario, job, current_datetime):
     submission_1 = Submission(scenario.id, scenario._ID_PREFIX, scenario.config_id)
 

+ 16 - 0
tests/core/task/test_task.py

@@ -22,6 +22,7 @@ from taipy.core.data.csv import CSVDataNode
 from taipy.core.data.data_node import DataNode
 from taipy.core.data.in_memory import InMemoryDataNode
 from taipy.core.task._task_manager import _TaskManager
+from taipy.core.task._task_manager_factory import _TaskManagerFactory
 from taipy.core.task.task import Task
 
 
@@ -45,6 +46,21 @@ def input_config():
     return [DataNodeConfig("input_name_1"), DataNodeConfig("input_name_2"), DataNodeConfig("input_name_3")]
 
 
+def test_task_equals(task):
+    task_manager = _TaskManagerFactory()._build_manager()
+
+    task_id = task.id
+    task_manager._set(task)
+
+    # To test if instance is same type
+    dn = CSVDataNode("foo_bar", Scope.SCENARIO, task_id)
+
+    task_2 = task_manager._get(task_id)
+    assert task == task_2
+    assert task != task_id
+    assert task != dn
+
+
 def test_create_task():
     name = "name_1"
     task = Task(name, {}, print, [], [])

+ 47 - 0
tests/core/test_taipy.py

@@ -432,6 +432,53 @@ class TestTaipy:
             tp.get_scenarios(tag="tag")
             mck.assert_called_once_with("tag")
 
+    def test_get_scenarios_sorted(self):
+        scenario_1_cfg = Config.configure_scenario(id="scenario_1")
+        scenario_2_cfg = Config.configure_scenario(id="scenario_2")
+
+        now = datetime.datetime.now() + datetime.timedelta(seconds=1)
+        scenario_1 = _ScenarioManager._create(scenario_1_cfg, now, "B_scenario")
+        scenario_2 = _ScenarioManager._create(scenario_2_cfg, now + datetime.timedelta(seconds=1), "C_scenario")
+        scenario_3 = _ScenarioManager._create(scenario_2_cfg, now + datetime.timedelta(seconds=2), "A_scenario")
+        scenario_4 = _ScenarioManager._create(scenario_2_cfg, now + datetime.timedelta(seconds=3), "D_scenario")
+
+        _ScenarioManager._tag(scenario_1, "banana")
+        _ScenarioManager._tag(scenario_1, "kiwi")  # scenario_1 now has tags {"banana", "kiwi"}
+        _ScenarioManager._tag(scenario_2, "apple")
+        _ScenarioManager._tag(scenario_2, "banana")  # scenario_2 now has tags {"banana", "apple"}
+        _ScenarioManager._tag(scenario_3, "apple")
+        _ScenarioManager._tag(scenario_3, "kiwi")  # scenario_3 now has tags {"kiwi", "apple"}
+
+        scenarios_sorted_by_name = [scenario_3, scenario_1, scenario_2, scenario_4]
+        assert scenarios_sorted_by_name == tp.get_scenarios(is_sorted=True, sort_key="name")
+        assert scenarios_sorted_by_name == tp.get_scenarios(is_sorted=True, sort_key="wrong_sort_key")
+
+        scenarios_with_same_config_id = [scenario_2, scenario_3, scenario_4]
+        scenarios_with_same_config_id.sort(key=lambda x: x.id)
+        scenarios_sorted_by_config_id = [
+            scenario_1,
+            scenarios_with_same_config_id[0],
+            scenarios_with_same_config_id[1],
+            scenarios_with_same_config_id[2],
+        ]
+        assert scenarios_sorted_by_config_id == tp.get_scenarios(is_sorted=True, sort_key="config_id")
+
+        scenarios_sorted_by_id = [scenario_1, scenario_2, scenario_3, scenario_4]
+        scenarios_sorted_by_id.sort(key=lambda x: x.id)
+        assert scenarios_sorted_by_id == tp.get_scenarios(is_sorted=True, sort_key="id")
+
+        scenarios_sorted_by_creation_date = [scenario_1, scenario_2, scenario_3, scenario_4]
+        assert scenarios_sorted_by_creation_date == tp.get_scenarios(is_sorted=True, sort_key="creation_date")
+
+        # Note: the scenario without any tags comes first.
+        scenarios_sorted_by_tag = [scenario_4, scenario_2, scenario_3, scenario_1]
+        assert scenarios_sorted_by_tag == tp.get_scenarios(is_sorted=True, sort_key="tags")
+
+        scenarios_sorted_by_name_descending_order = [scenario_4, scenario_2, scenario_1, scenario_3]
+        assert scenarios_sorted_by_name_descending_order == tp.get_scenarios(
+            is_sorted=True, descending=True, sort_key="name"
+        )
+
     def test_get_scenario(self, scenario):
         with mock.patch("taipy.core.scenario._scenario_manager._ScenarioManager._get") as mck:
             scenario_id = ScenarioId("SCENARIO_id")

+ 80 - 52
tests/core/test_taipy/test_export.py

@@ -10,21 +10,23 @@
 # specific language governing permissions and limitations under the License.
 
 import os
-import shutil
+import zipfile
 
 import pandas as pd
 import pytest
 
 import taipy.core.taipy as tp
 from taipy import Config, Frequency, Scope
-from taipy.core.exceptions import ExportFolderAlreadyExists, InvalidExportPath
+from taipy.core.exceptions import ExportPathAlreadyExists
 
 
 @pytest.fixture(scope="function", autouse=True)
-def clean_tmp_folder():
-    shutil.rmtree("./tmp", ignore_errors=True)
+def clean_export_zip_file():
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")
     yield
-    shutil.rmtree("./tmp", ignore_errors=True)
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")
 
 
 def plus_1(x):
@@ -57,15 +59,28 @@ def configure_test_scenario(input_data, frequency=None):
     return scenario_cfg
 
 
-def test_export_scenario_to_the_storage_folder():
+def test_export_scenario_with_and_without_zip_extension(tmp_path):
     scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
+
     scenario = tp.create_scenario(scenario_cfg)
+    tp.submit(scenario)
+
+    # Export without the .zip extension should create the tmp.zip file
+    tp.export_scenario(scenario.id, f"{tmp_path}/tmp")
+    assert os.path.exists(f"{tmp_path}/tmp.zip")
+
+    os.remove(f"{tmp_path}/tmp.zip")
 
-    with pytest.raises(InvalidExportPath):
-        tp.export_scenario(scenario.id, Config.core.taipy_storage_folder)
+    # Export with the .zip extension should also create the tmp.zip file
+    tp.export_scenario(scenario.id, f"{tmp_path}/tmp.zip")
+    assert os.path.exists(f"{tmp_path}/tmp.zip")
 
+    # Export with another extension should create the tmp.<extension>.zip file
+    tp.export_scenario(scenario.id, f"{tmp_path}/tmp.tar.gz")
+    assert os.path.exists(f"{tmp_path}/tmp.tar.gz.zip")
 
-def test_export_scenario_with_cycle():
+
+def test_export_scenario_with_cycle(tmp_path):
     scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
 
     scenario = tp.create_scenario(scenario_cfg)
@@ -73,9 +88,11 @@ def test_export_scenario_with_cycle():
     jobs = submission.jobs
 
     # Export the submitted scenario
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario")
+    tp.export_scenario(scenario.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path)
 
-    assert sorted(os.listdir("./tmp/exp_scenario/data_nodes")) == sorted(
+    assert sorted(os.listdir(f"{tmp_path}/data_nodes")) == sorted(
         [
             f"{scenario.i_1.id}.json",
             f"{scenario.o_1_csv.id}.json",
@@ -84,7 +101,7 @@ def test_export_scenario_with_cycle():
             f"{scenario.o_1_json.id}.json",
         ]
     )
-    assert sorted(os.listdir("./tmp/exp_scenario/tasks")) == sorted(
+    assert sorted(os.listdir(f"{tmp_path}/tasks")) == sorted(
         [
             f"{scenario.t_1_csv.id}.json",
             f"{scenario.t_1_excel.id}.json",
@@ -92,32 +109,34 @@ def test_export_scenario_with_cycle():
             f"{scenario.t_1_json.id}.json",
         ]
     )
-    assert sorted(os.listdir("./tmp/exp_scenario/scenarios")) == sorted([f"{scenario.id}.json"])
-    assert sorted(os.listdir("./tmp/exp_scenario/jobs")) == sorted(
+    assert sorted(os.listdir(f"{tmp_path}/scenarios")) == sorted([f"{scenario.id}.json"])
+    assert sorted(os.listdir(f"{tmp_path}/jobs")) == sorted(
         [f"{jobs[0].id}.json", f"{jobs[1].id}.json", f"{jobs[2].id}.json", f"{jobs[3].id}.json"]
     )
-    assert os.listdir("./tmp/exp_scenario/submission") == [f"{submission.id}.json"]
-    assert sorted(os.listdir("./tmp/exp_scenario/cycles")) == sorted([f"{scenario.cycle.id}.json"])
+    assert os.listdir(f"{tmp_path}/submission") == [f"{submission.id}.json"]
+    assert sorted(os.listdir(f"{tmp_path}/cycles")) == sorted([f"{scenario.cycle.id}.json"])
 
 
-def test_export_scenario_without_cycle():
+def test_export_scenario_without_cycle(tmp_path):
     scenario_cfg = configure_test_scenario(1)
 
     scenario = tp.create_scenario(scenario_cfg)
     tp.submit(scenario)
 
     # Export the submitted scenario
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario")
+    tp.export_scenario(scenario.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path)
 
-    assert os.path.exists("./tmp/exp_scenario/data_nodes")
-    assert os.path.exists("./tmp/exp_scenario/tasks")
-    assert os.path.exists("./tmp/exp_scenario/scenarios")
-    assert os.path.exists("./tmp/exp_scenario/jobs")
-    assert os.path.exists("./tmp/exp_scenario/submission")
-    assert not os.path.exists("./tmp/exp_scenario/cycles")  # No cycle
+    assert os.path.exists(f"{tmp_path}/data_nodes")
+    assert os.path.exists(f"{tmp_path}/tasks")
+    assert os.path.exists(f"{tmp_path}/scenarios")
+    assert os.path.exists(f"{tmp_path}/jobs")
+    assert os.path.exists(f"{tmp_path}/submission")
+    assert not os.path.exists(f"{tmp_path}/cycles")  # No cycle
 
 
-def test_export_scenario_override_existing_files():
+def test_export_scenario_override_existing_files(tmp_path):
     scenario_1_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
     scenario_2_cfg = configure_test_scenario(2)
 
@@ -125,45 +144,54 @@ def test_export_scenario_override_existing_files():
     tp.submit(scenario_1)
 
     # Export the submitted scenario_1
-    tp.export_scenario(scenario_1.id, "./tmp/exp_scenario")
-    assert os.path.exists("./tmp/exp_scenario/data_nodes")
-    assert os.path.exists("./tmp/exp_scenario/tasks")
-    assert os.path.exists("./tmp/exp_scenario/scenarios")
-    assert os.path.exists("./tmp/exp_scenario/jobs")
-    assert os.path.exists("./tmp/exp_scenario/submission")
-    assert os.path.exists("./tmp/exp_scenario/cycles")
+    tp.export_scenario(scenario_1.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_1")
+    assert os.path.exists(f"{tmp_path}/scenario_1/data_nodes")
+    assert os.path.exists(f"{tmp_path}/scenario_1/tasks")
+    assert os.path.exists(f"{tmp_path}/scenario_1/scenarios")
+    assert os.path.exists(f"{tmp_path}/scenario_1/jobs")
+    assert os.path.exists(f"{tmp_path}/scenario_1/submission")
+    assert os.path.exists(f"{tmp_path}/scenario_1/cycles")
 
     scenario_2 = tp.create_scenario(scenario_2_cfg)
     tp.submit(scenario_2)
 
-    # Export the submitted scenario_2 to the same folder should raise an error
-    with pytest.raises(ExportFolderAlreadyExists):
-        tp.export_scenario(scenario_2.id, "./tmp/exp_scenario")
+    # Export the submitted scenario_2 to the same path should raise an error
+    with pytest.raises(ExportPathAlreadyExists):
+        tp.export_scenario(scenario_2.id, "tmp.zip")
 
     # Export the submitted scenario_2 without a cycle and override the existing files
-    tp.export_scenario(scenario_2.id, "./tmp/exp_scenario", override=True)
-    assert os.path.exists("./tmp/exp_scenario/data_nodes")
-    assert os.path.exists("./tmp/exp_scenario/tasks")
-    assert os.path.exists("./tmp/exp_scenario/scenarios")
-    assert os.path.exists("./tmp/exp_scenario/jobs")
-    assert os.path.exists("./tmp/exp_scenario/submission")
-    # The cycles folder should be removed when overriding
-    assert not os.path.exists("./tmp/exp_scenario/cycles")
-
-
-def test_export_scenario_filesystem_with_data():
+    tp.export_scenario(scenario_2.id, "tmp.zip", override=True)
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_2")
+    assert os.path.exists(f"{tmp_path}/scenario_2/data_nodes")
+    assert os.path.exists(f"{tmp_path}/scenario_2/tasks")
+    assert os.path.exists(f"{tmp_path}/scenario_2/scenarios")
+    assert os.path.exists(f"{tmp_path}/scenario_2/jobs")
+    assert os.path.exists(f"{tmp_path}/scenario_2/submission")
+    # The cycles folder should not exists since the new scenario does not have a cycle
+    assert not os.path.exists(f"{tmp_path}/scenario_2/cycles")
+
+
+def test_export_scenario_filesystem_with_data(tmp_path):
     scenario_cfg = configure_test_scenario(1)
     scenario = tp.create_scenario(scenario_cfg)
     tp.submit(scenario)
 
     # Export scenario without data
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario")
-    assert not os.path.exists("./tmp/exp_scenario/user_data")
+    tp.export_scenario(scenario.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_without_data")
+    assert not os.path.exists(f"{tmp_path}/scenario_without_data/user_data")
 
     # Export scenario with data
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario", include_data=True, override=True)
-    assert os.path.exists("./tmp/exp_scenario/user_data")
-    data_files = [f for _, _, files in os.walk("./tmp/exp_scenario/user_data") for f in files]
+    tp.export_scenario(scenario.id, "tmp.zip", include_data=True, override=True)
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_with_data")
+    assert os.path.exists(f"{tmp_path}/scenario_with_data/user_data")
+
+    data_files = [f for _, _, files in os.walk(f"{tmp_path}/scenario_with_data/user_data") for f in files]
     assert sorted(data_files) == sorted(
         [
             f"{scenario.i_1.id}.p",
@@ -188,6 +216,6 @@ def test_export_non_file_based_data_node_raise_warning(caplog):
     tp.submit(scenario)
 
     # Export scenario with in-memory data node
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario", include_data=True)
+    tp.export_scenario(scenario.id, "tmp.zip", include_data=True)
     expected_warning = f"Data node {scenario.o_mem.id} is not a file-based data node and the data will not be exported"
     assert expected_warning in caplog.text

+ 63 - 56
tests/core/test_taipy/test_export_with_sql_repo.py

@@ -10,21 +10,23 @@
 # specific language governing permissions and limitations under the License.
 
 import os
-import shutil
+import zipfile
 
 import pandas as pd
 import pytest
 
 import taipy.core.taipy as tp
 from taipy import Config, Frequency, Scope
-from taipy.core.exceptions import ExportFolderAlreadyExists, InvalidExportPath
+from taipy.core.exceptions import ExportPathAlreadyExists
 
 
 @pytest.fixture(scope="function", autouse=True)
-def clean_tmp_folder():
-    shutil.rmtree("./tmp", ignore_errors=True)
+def clean_export_zip_file():
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")
     yield
-    shutil.rmtree("./tmp", ignore_errors=True)
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")
 
 
 def plus_1(x):
@@ -57,15 +59,7 @@ def configure_test_scenario(input_data, frequency=None):
     return scenario_cfg
 
 
-def test_export_scenario_to_the_storage_folder(init_sql_repo):
-    scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
-    scenario = tp.create_scenario(scenario_cfg)
-
-    with pytest.raises(InvalidExportPath):
-        tp.export_scenario(scenario.id, Config.core.taipy_storage_folder)
-
-
-def test_export_scenario_with_cycle(init_sql_repo):
+def test_export_scenario_with_cycle(tmp_path, init_sql_repo):
     scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
 
     scenario = tp.create_scenario(scenario_cfg)
@@ -73,9 +67,11 @@ def test_export_scenario_with_cycle(init_sql_repo):
     jobs = submission.jobs
 
     # Export the submitted scenario
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario")
+    tp.export_scenario(scenario.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path)
 
-    assert sorted(os.listdir("./tmp/exp_scenario/data_node")) == sorted(
+    assert sorted(os.listdir(f"{tmp_path}/data_node")) == sorted(
         [
             f"{scenario.i_1.id}.json",
             f"{scenario.o_1_csv.id}.json",
@@ -84,7 +80,7 @@ def test_export_scenario_with_cycle(init_sql_repo):
             f"{scenario.o_1_json.id}.json",
         ]
     )
-    assert sorted(os.listdir("./tmp/exp_scenario/task")) == sorted(
+    assert sorted(os.listdir(f"{tmp_path}/task")) == sorted(
         [
             f"{scenario.t_1_csv.id}.json",
             f"{scenario.t_1_excel.id}.json",
@@ -92,32 +88,34 @@ def test_export_scenario_with_cycle(init_sql_repo):
             f"{scenario.t_1_json.id}.json",
         ]
     )
-    assert sorted(os.listdir("./tmp/exp_scenario/scenario")) == sorted([f"{scenario.id}.json"])
-    assert sorted(os.listdir("./tmp/exp_scenario/job")) == sorted(
+    assert sorted(os.listdir(f"{tmp_path}/scenario")) == sorted([f"{scenario.id}.json"])
+    assert sorted(os.listdir(f"{tmp_path}/job")) == sorted(
         [f"{jobs[0].id}.json", f"{jobs[1].id}.json", f"{jobs[2].id}.json", f"{jobs[3].id}.json"]
     )
-    assert os.listdir("./tmp/exp_scenario/submission") == [f"{submission.id}.json"]
-    assert sorted(os.listdir("./tmp/exp_scenario/cycle")) == sorted([f"{scenario.cycle.id}.json"])
+    assert os.listdir(f"{tmp_path}/submission") == [f"{submission.id}.json"]
+    assert sorted(os.listdir(f"{tmp_path}/cycle")) == sorted([f"{scenario.cycle.id}.json"])
 
 
-def test_export_scenario_without_cycle(init_sql_repo):
+def test_export_scenario_without_cycle(tmp_path, init_sql_repo):
     scenario_cfg = configure_test_scenario(1)
 
     scenario = tp.create_scenario(scenario_cfg)
     tp.submit(scenario)
 
     # Export the submitted scenario
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario")
+    tp.export_scenario(scenario.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path)
 
-    assert os.path.exists("./tmp/exp_scenario/data_node")
-    assert os.path.exists("./tmp/exp_scenario/task")
-    assert os.path.exists("./tmp/exp_scenario/scenario")
-    assert os.path.exists("./tmp/exp_scenario/job")
-    assert os.path.exists("./tmp/exp_scenario/submission")
-    assert not os.path.exists("./tmp/exp_scenario/cycle")  # No cycle
+    assert os.path.exists(f"{tmp_path}/data_node")
+    assert os.path.exists(f"{tmp_path}/task")
+    assert os.path.exists(f"{tmp_path}/scenario")
+    assert os.path.exists(f"{tmp_path}/job")
+    assert os.path.exists(f"{tmp_path}/submission")
+    assert not os.path.exists(f"{tmp_path}/cycle")  # No cycle
 
 
-def test_export_scenario_override_existing_files(init_sql_repo):
+def test_export_scenario_override_existing_files(tmp_path, init_sql_repo):
     scenario_1_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
     scenario_2_cfg = configure_test_scenario(2)
 
@@ -125,45 +123,54 @@ def test_export_scenario_override_existing_files(init_sql_repo):
     tp.submit(scenario_1)
 
     # Export the submitted scenario_1
-    tp.export_scenario(scenario_1.id, "./tmp/exp_scenario")
-    assert os.path.exists("./tmp/exp_scenario/data_node")
-    assert os.path.exists("./tmp/exp_scenario/task")
-    assert os.path.exists("./tmp/exp_scenario/scenario")
-    assert os.path.exists("./tmp/exp_scenario/job")
-    assert os.path.exists("./tmp/exp_scenario/submission")
-    assert os.path.exists("./tmp/exp_scenario/cycle")
+    tp.export_scenario(scenario_1.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_1")
+    assert os.path.exists(f"{tmp_path}/scenario_1/data_node")
+    assert os.path.exists(f"{tmp_path}/scenario_1/task")
+    assert os.path.exists(f"{tmp_path}/scenario_1/scenario")
+    assert os.path.exists(f"{tmp_path}/scenario_1/job")
+    assert os.path.exists(f"{tmp_path}/scenario_1/submission")
+    assert os.path.exists(f"{tmp_path}/scenario_1/cycle")
 
     scenario_2 = tp.create_scenario(scenario_2_cfg)
     tp.submit(scenario_2)
 
     # Export the submitted scenario_2 to the same folder should raise an error
-    with pytest.raises(ExportFolderAlreadyExists):
-        tp.export_scenario(scenario_2.id, "./tmp/exp_scenario")
+    with pytest.raises(ExportPathAlreadyExists):
+        tp.export_scenario(scenario_2.id, "tmp.zip")
 
     # Export the submitted scenario_2 without a cycle and override the existing files
-    tp.export_scenario(scenario_2.id, "./tmp/exp_scenario", override=True)
-    assert os.path.exists("./tmp/exp_scenario/data_node")
-    assert os.path.exists("./tmp/exp_scenario/task")
-    assert os.path.exists("./tmp/exp_scenario/scenario")
-    assert os.path.exists("./tmp/exp_scenario/job")
-    assert os.path.exists("./tmp/exp_scenario/submission")
-    # The cycles folder should be removed when overriding
-    assert not os.path.exists("./tmp/exp_scenario/cycle")
-
-
-def test_export_scenario_filesystem_with_data(init_sql_repo):
+    tp.export_scenario(scenario_2.id, "tmp.zip", override=True)
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_2")
+    assert os.path.exists(f"{tmp_path}/scenario_2/data_node")
+    assert os.path.exists(f"{tmp_path}/scenario_2/task")
+    assert os.path.exists(f"{tmp_path}/scenario_2/scenario")
+    assert os.path.exists(f"{tmp_path}/scenario_2/job")
+    assert os.path.exists(f"{tmp_path}/scenario_2/submission")
+    # The cycles folder should not exist since the new scenario does not have a cycle
+    assert not os.path.exists(f"{tmp_path}/scenario_2/cycle")
+
+
+def test_export_scenario_sql_repo_with_data(tmp_path, init_sql_repo):
     scenario_cfg = configure_test_scenario(1)
     scenario = tp.create_scenario(scenario_cfg)
     tp.submit(scenario)
 
     # Export scenario without data
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario")
-    assert not os.path.exists("./tmp/exp_scenario/user_data")
+    tp.export_scenario(scenario.id, "tmp.zip")
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_without_data")
+    assert not os.path.exists(f"{tmp_path}/scenario_without_data/user_data")
 
     # Export scenario with data
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario", include_data=True, override=True)
-    assert os.path.exists("./tmp/exp_scenario/user_data")
-    data_files = [f for _, _, files in os.walk("./tmp/exp_scenario/user_data") for f in files]
+    tp.export_scenario(scenario.id, "tmp.zip", include_data=True, override=True)
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path / "scenario_with_data")
+    assert os.path.exists(f"{tmp_path}/scenario_with_data/user_data")
+
+    data_files = [f for _, _, files in os.walk(f"{tmp_path}/scenario_with_data/user_data") for f in files]
     assert sorted(data_files) == sorted(
         [
             f"{scenario.i_1.id}.p",
@@ -188,6 +195,6 @@ def test_export_non_file_based_data_node_raise_warning(init_sql_repo, caplog):
     tp.submit(scenario)
 
     # Export scenario with in-memory data node
-    tp.export_scenario(scenario.id, "./tmp/exp_scenario", include_data=True)
+    tp.export_scenario(scenario.id, "tmp.zip", include_data=True)
     expected_warning = f"Data node {scenario.o_mem.id} is not a file-based data node and the data will not be exported"
     assert expected_warning in caplog.text

+ 213 - 0
tests/core/test_taipy/test_import.py

@@ -0,0 +1,213 @@
+# Copyright 2021-2024 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+import shutil
+import zipfile
+
+import pandas as pd
+import pytest
+
+import taipy.core.taipy as tp
+from taipy import Config, Frequency, Scope
+from taipy.core._version._version_manager import _VersionManager
+from taipy.core.cycle._cycle_manager import _CycleManager
+from taipy.core.data._data_manager import _DataManager
+from taipy.core.exceptions.exceptions import (
+    ConflictedConfigurationError,
+    EntitiesToBeImportAlredyExist,
+    ImportArchiveDoesntContainAnyScenario,
+    ImportScenarioDoesntHaveAVersion,
+)
+from taipy.core.job._job_manager import _JobManager
+from taipy.core.scenario._scenario_manager import _ScenarioManager
+from taipy.core.submission._submission_manager import _SubmissionManager
+from taipy.core.task._task_manager import _TaskManager
+
+
+@pytest.fixture(scope="function", autouse=True)
+def clean_export_zip_file():
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")
+    yield
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")
+
+
+def plus_1(x):
+    return x + 1
+
+
+def plus_1_dataframe(x):
+    return pd.DataFrame({"output": [x + 1]})
+
+
+def configure_test_scenario(input_data, frequency=None):
+    input_cfg = Config.configure_data_node(
+        id=f"i_{input_data}", storage_type="pickle", scope=Scope.SCENARIO, default_data=input_data
+    )
+    csv_output_cfg = Config.configure_data_node(id=f"o_{input_data}_csv", storage_type="csv")
+    excel_output_cfg = Config.configure_data_node(id=f"o_{input_data}_excel", storage_type="excel")
+    parquet_output_cfg = Config.configure_data_node(id=f"o_{input_data}_parquet", storage_type="parquet")
+    json_output_cfg = Config.configure_data_node(id=f"o_{input_data}_json", storage_type="json")
+
+    csv_task_cfg = Config.configure_task(f"t_{input_data}_csv", plus_1_dataframe, input_cfg, csv_output_cfg)
+    excel_task_cfg = Config.configure_task(f"t_{input_data}_excel", plus_1_dataframe, input_cfg, excel_output_cfg)
+    parquet_task_cfg = Config.configure_task(f"t_{input_data}_parquet", plus_1_dataframe, input_cfg, parquet_output_cfg)
+    json_task_cfg = Config.configure_task(f"t_{input_data}_json", plus_1, input_cfg, json_output_cfg)
+    scenario_cfg = Config.configure_scenario(
+        id=f"s_{input_data}",
+        task_configs=[csv_task_cfg, excel_task_cfg, parquet_task_cfg, json_task_cfg],
+        frequency=frequency,
+    )
+
+    return scenario_cfg
+
+
+def export_test_scenario(scenario_cfg, export_path="tmp.zip", override=False, include_data=False):
+    scenario = tp.create_scenario(scenario_cfg)
+    tp.submit(scenario)
+
+    # Export the submitted scenario
+    tp.export_scenario(scenario.id, export_path, override, include_data)
+    return scenario
+
+
+def test_import_scenario_without_data(init_managers):
+    scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
+    scenario = export_test_scenario(scenario_cfg)
+
+    init_managers()
+
+    assert _ScenarioManager._get_all() == []
+    imported_scenario = tp.import_scenario("tmp.zip")
+
+    # The imported scenario should be the same as the exported scenario
+    assert _ScenarioManager._get_all() == [imported_scenario]
+    assert imported_scenario == scenario
+
+    # All entities belonging to the scenario should be imported
+    assert len(_CycleManager._get_all()) == 1
+    assert len(_TaskManager._get_all()) == 4
+    assert len(_DataManager._get_all()) == 5
+    assert len(_JobManager._get_all()) == 4
+    assert len(_SubmissionManager._get_all()) == 1
+    assert len(_VersionManager._get_all()) == 1
+
+
+def test_import_scenario_with_data(init_managers):
+    scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
+    export_test_scenario(scenario_cfg, include_data=True)
+
+    init_managers()
+
+    assert _ScenarioManager._get_all() == []
+    imported_scenario = tp.import_scenario("tmp.zip")
+
+    # All data of all data nodes should be imported
+    assert all(os.path.exists(dn.path) for dn in imported_scenario.data_nodes.values())
+
+
+def test_import_scenario_when_entities_are_already_existed_should_rollback(caplog):
+    scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
+    export_test_scenario(scenario_cfg)
+
+    caplog.clear()
+
+    _CycleManager._delete_all()
+    _TaskManager._delete_all()
+    _DataManager._delete_all()
+    _JobManager._delete_all()
+    _ScenarioManager._delete_all()
+
+    assert len(_CycleManager._get_all()) == 0
+    assert len(_TaskManager._get_all()) == 0
+    assert len(_DataManager._get_all()) == 0
+    assert len(_JobManager._get_all()) == 0
+    assert len(_SubmissionManager._get_all()) == 1  # Keep the submission entity to test the rollback
+    submission_id = _SubmissionManager._get_all()[0].id
+    assert len(_ScenarioManager._get_all()) == 0
+
+    # Importing the scenario while the old entities still exist should raise an error
+    with pytest.raises(EntitiesToBeImportAlredyExist):
+        tp.import_scenario("tmp.zip")
+    assert all(log.levelname in ["ERROR", "INFO"] for log in caplog.records)
+    assert "An error occurred during the import" in caplog.text
+    assert f"{submission_id} already exists. Please use the 'override' parameter to override it" in caplog.text
+
+    # No entity should be imported and the old entities should be kept
+    assert len(_CycleManager._get_all()) == 0
+    assert len(_TaskManager._get_all()) == 0
+    assert len(_DataManager._get_all()) == 0
+    assert len(_JobManager._get_all()) == 0
+    assert len(_SubmissionManager._get_all()) == 1  # Keep the submission entity to test the rollback
+    assert len(_ScenarioManager._get_all()) == 0
+
+    caplog.clear()
+
+    # Import with override flag
+    tp.import_scenario("tmp.zip", override=True)
+    assert all(log.levelname in ["WARNING", "INFO"] for log in caplog.records)
+    assert f"{submission_id} already exists and will be overridden" in caplog.text
+
+    # The scenario is imported and overrides the old one
+    assert len(_ScenarioManager._get_all()) == 1
+    assert len(_CycleManager._get_all()) == 1
+    assert len(_TaskManager._get_all()) == 4
+    assert len(_DataManager._get_all()) == 5
+    assert len(_JobManager._get_all()) == 4
+    assert len(_SubmissionManager._get_all()) == 1
+    assert len(_VersionManager._get_all()) == 1
+
+
+def test_import_incompatible_scenario(init_managers):
+    scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
+    export_test_scenario(scenario_cfg)
+
+    Config.unblock_update()
+
+    # Configure a new dn to make the exported version incompatible
+    Config.configure_data_node("new_dn")
+
+    with pytest.raises(ConflictedConfigurationError):
+        tp.import_scenario("tmp.zip")
+
+
+def test_import_a_non_exists_folder():
+    scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
+    export_test_scenario(scenario_cfg)
+
+    with pytest.raises(FileNotFoundError):
+        tp.import_scenario("non_exists_folder")
+
+
+def test_import_an_empty_archive(tmpdir_factory):
+    empty_folder = tmpdir_factory.mktemp("empty_folder").strpath
+    shutil.make_archive("tmp", "zip", empty_folder)
+
+    with pytest.raises(ImportArchiveDoesntContainAnyScenario):
+        tp.import_scenario("tmp.zip")
+
+
+def test_import_with_no_version(tmp_path):
+    scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
+    export_test_scenario(scenario_cfg)
+
+    # Extract the zip,
+    with zipfile.ZipFile("./tmp.zip") as zip_file:
+        zip_file.extractall(tmp_path)
+    # remove the version,
+    shutil.rmtree(f"{tmp_path}/version")
+    # and archive the scenario without the version again
+    shutil.make_archive("tmp", "zip", tmp_path)
+
+    with pytest.raises(ImportScenarioDoesntHaveAVersion):
+        tp.import_scenario("tmp.zip")

+ 174 - 0
tests/core/test_taipy/test_import_with_sql_repo.py

@@ -0,0 +1,174 @@
+# Copyright 2021-2024 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+
+import pandas as pd
+import pytest
+
+import taipy.core.taipy as tp
+from taipy import Config, Frequency, Scope
+from taipy.core._version._version_manager import _VersionManager
+from taipy.core.cycle._cycle_manager import _CycleManager
+from taipy.core.data._data_manager import _DataManager
+from taipy.core.exceptions.exceptions import ConflictedConfigurationError, EntitiesToBeImportAlredyExist
+from taipy.core.job._job_manager import _JobManager
+from taipy.core.scenario._scenario_manager import _ScenarioManager
+from taipy.core.submission._submission_manager import _SubmissionManager
+from taipy.core.task._task_manager import _TaskManager
+
+
+@pytest.fixture(scope="function", autouse=True)
+def clean_export_zip_file():
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")
+    yield
+    if os.path.exists("./tmp.zip"):
+        os.remove("./tmp.zip")
+
+
+def plus_1(x):
+    return x + 1
+
+
+def plus_1_dataframe(x):
+    return pd.DataFrame({"output": [x + 1]})
+
+
+def configure_test_scenario(input_data, frequency=None):
+    input_cfg = Config.configure_data_node(
+        id=f"i_{input_data}", storage_type="pickle", scope=Scope.SCENARIO, default_data=input_data
+    )
+    csv_output_cfg = Config.configure_data_node(id=f"o_{input_data}_csv", storage_type="csv")
+    excel_output_cfg = Config.configure_data_node(id=f"o_{input_data}_excel", storage_type="excel")
+    parquet_output_cfg = Config.configure_data_node(id=f"o_{input_data}_parquet", storage_type="parquet")
+    json_output_cfg = Config.configure_data_node(id=f"o_{input_data}_json", storage_type="json")
+
+    csv_task_cfg = Config.configure_task(f"t_{input_data}_csv", plus_1_dataframe, input_cfg, csv_output_cfg)
+    excel_task_cfg = Config.configure_task(f"t_{input_data}_excel", plus_1_dataframe, input_cfg, excel_output_cfg)
+    parquet_task_cfg = Config.configure_task(f"t_{input_data}_parquet", plus_1_dataframe, input_cfg, parquet_output_cfg)
+    json_task_cfg = Config.configure_task(f"t_{input_data}_json", plus_1, input_cfg, json_output_cfg)
+    scenario_cfg = Config.configure_scenario(
+        id=f"s_{input_data}",
+        task_configs=[csv_task_cfg, excel_task_cfg, parquet_task_cfg, json_task_cfg],
+        frequency=frequency,
+    )
+
+    return scenario_cfg
+
+
+def export_test_scenario(scenario_cfg, export_path="tmp.zip", override=False, include_data=False):
+    scenario = tp.create_scenario(scenario_cfg)
+    tp.submit(scenario)
+
+    # Export the submitted scenario
+    tp.export_scenario(scenario.id, export_path, override, include_data)
+    return scenario
+
+
+def test_import_scenario_without_data(init_sql_repo, init_managers):
+    scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
+    scenario = export_test_scenario(scenario_cfg)
+
+    init_managers()
+
+    assert _ScenarioManager._get_all() == []
+    imported_scenario = tp.import_scenario("tmp.zip")
+
+    # The imported scenario should be the same as the exported scenario
+    assert _ScenarioManager._get_all() == [imported_scenario]
+    assert imported_scenario == scenario
+
+    # All entities belonging to the scenario should be imported
+    assert len(_CycleManager._get_all()) == 1
+    assert len(_TaskManager._get_all()) == 4
+    assert len(_DataManager._get_all()) == 5
+    assert len(_JobManager._get_all()) == 4
+    assert len(_SubmissionManager._get_all()) == 1
+    assert len(_VersionManager._get_all()) == 1
+
+
+def test_import_scenario_with_data(init_sql_repo, init_managers):
+    scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
+    export_test_scenario(scenario_cfg, include_data=True)
+
+    init_managers()
+
+    assert _ScenarioManager._get_all() == []
+    imported_scenario = tp.import_scenario("tmp.zip")
+
+    # All data of all data nodes should be imported
+    assert all(os.path.exists(dn.path) for dn in imported_scenario.data_nodes.values())
+
+
+def test_import_scenario_when_entities_are_already_existed_should_rollback(init_sql_repo, caplog):
+    scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
+    export_test_scenario(scenario_cfg)
+
+    caplog.clear()
+
+    _CycleManager._delete_all()
+    _TaskManager._delete_all()
+    _DataManager._delete_all()
+    _JobManager._delete_all()
+    _ScenarioManager._delete_all()
+
+    assert len(_CycleManager._get_all()) == 0
+    assert len(_TaskManager._get_all()) == 0
+    assert len(_DataManager._get_all()) == 0
+    assert len(_JobManager._get_all()) == 0
+    assert len(_SubmissionManager._get_all()) == 1  # Keep the submission entity to test the rollback
+    submission_id = _SubmissionManager._get_all()[0].id
+    assert len(_ScenarioManager._get_all()) == 0
+
+    # Importing the scenario while the old entities still exist should raise an error
+    with pytest.raises(EntitiesToBeImportAlredyExist):
+        tp.import_scenario("tmp.zip")
+    assert all(log.levelname in ["ERROR", "INFO"] for log in caplog.records)
+    assert "An error occurred during the import" in caplog.text
+    assert f"{submission_id} already exists. Please use the 'override' parameter to override it" in caplog.text
+
+    # No entity should be imported and the old entities should be kept
+    assert len(_CycleManager._get_all()) == 0
+    assert len(_TaskManager._get_all()) == 0
+    assert len(_DataManager._get_all()) == 0
+    assert len(_JobManager._get_all()) == 0
+    assert len(_SubmissionManager._get_all()) == 1  # Keep the submission entity to test the rollback
+    assert len(_ScenarioManager._get_all()) == 0
+
+    caplog.clear()
+
+    # Import with override flag
+    tp.import_scenario("tmp.zip", override=True)
+    assert all(log.levelname in ["WARNING", "INFO"] for log in caplog.records)
+    assert f"{submission_id} already exists and will be overridden" in caplog.text
+
+    # The scenario is imported and overrides the old one
+    assert len(_ScenarioManager._get_all()) == 1
+    assert len(_CycleManager._get_all()) == 1
+    assert len(_TaskManager._get_all()) == 4
+    assert len(_DataManager._get_all()) == 5
+    assert len(_JobManager._get_all()) == 4
+    assert len(_SubmissionManager._get_all()) == 1
+    assert len(_VersionManager._get_all()) == 1
+
+
+def test_import_incompatible_scenario(init_sql_repo, init_managers):
+    scenario_cfg = configure_test_scenario(1, frequency=Frequency.DAILY)
+    export_test_scenario(scenario_cfg)
+
+    Config.unblock_update()
+
+    # Configure a new dn to make the exported version incompatible
+    Config.configure_data_node("new_dn")
+
+    with pytest.raises(ConflictedConfigurationError):
+        tp.import_scenario("tmp.zip")

+ 1 - 1
tests/gui/extension/test_library.py

@@ -162,7 +162,7 @@ def test_lib_inner_md(gui: Gui, test_client, helpers):
     expected = [
         "<TestLib_Inner",
         "value={tpec_TpExPr_val_TPMDL_0}",
-        "withProperty={tpec_TpExPr_tpec_TpExPr_val_TPMDL_0_TPMDL_0}",
+        "withProperty={tpec_TpExPr_val_TPMDL_0}",
     ]
     helpers.test_control_md(gui, md_string, expected)
 

+ 6 - 6
tests/gui_core/test_context_is_readable.py

@@ -328,7 +328,7 @@ class TestGuiCoreContext_is_readable:
     def test_get_scenarios_for_owner(self):
         with patch("taipy.gui_core._context.core_get", side_effect=mock_core_get) as mockget:
             gui_core_context = _GuiCoreContext(Mock())
-            gui_core_context.get_scenarios_for_owner(a_scenario.id)
+            gui_core_context.get_scenarios_for_owner(a_scenario.id, '')
             mockget.assert_called_once()
             mockget.reset_mock()
 
@@ -406,18 +406,18 @@ class TestGuiCoreContext_is_readable:
     def test_get_data_node_tabular_data(self):
         with patch("taipy.gui_core._context.core_get", side_effect=mock_core_get) as mockget:
             gui_core_context = _GuiCoreContext(Mock())
-            gui_core_context.get_data_node_tabular_data(a_datanode, a_datanode.id)
+            gui_core_context.get_data_node_tabular_data(a_datanode.id, "")
             mockget.assert_called_once()
             mockget.reset_mock()
 
             with patch("taipy.gui_core._context.is_readable", side_effect=mock_is_readable_false):
-                gui_core_context.get_data_node_tabular_data(a_datanode, a_datanode.id)
+                gui_core_context.get_data_node_tabular_data(a_datanode.id, "")
                 mockget.assert_not_called()
 
     def test_get_data_node_tabular_columns(self):
         with patch("taipy.gui_core._context.core_get", side_effect=mock_core_get) as mockget:
             gui_core_context = _GuiCoreContext(Mock())
-            gui_core_context.get_data_node_tabular_columns(a_datanode, a_datanode.id)
+            gui_core_context.get_data_node_tabular_columns(a_datanode.id, "")
             mockget.assert_called_once()
             mockget.reset_mock()
 
@@ -428,10 +428,10 @@ class TestGuiCoreContext_is_readable:
     def test_get_data_node_chart_config(self):
         with patch("taipy.gui_core._context.core_get", side_effect=mock_core_get) as mockget:
             gui_core_context = _GuiCoreContext(Mock())
-            gui_core_context.get_data_node_chart_config(a_datanode, a_datanode.id)
+            gui_core_context.get_data_node_chart_config(a_datanode.id, "")
             mockget.assert_called_once()
             mockget.reset_mock()
 
             with patch("taipy.gui_core._context.is_readable", side_effect=mock_is_readable_false):
-                gui_core_context.get_data_node_chart_config(a_datanode, a_datanode.id)
+                gui_core_context.get_data_node_chart_config(a_datanode.id, "")
                 mockget.assert_not_called()

+ 17 - 0
tests/templates/test_template_cli.py

@@ -9,6 +9,7 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
+import os
 from unittest.mock import patch
 
 import pytest
@@ -29,3 +30,19 @@ def test_create_cli_with_unsupported_template(capsys):
             _entrypoint()
         _, err = capsys.readouterr()
         assert "invalid choice: 'not-a-template'" in err
+
+
+def test_create_app_on_existing_folder(tmpdir, capsys, monkeypatch):
+    os.chdir(tmpdir)
+    os.mkdir(os.path.join(tmpdir, "taipy_application"))
+
+    # Mock the click.prompt to always return the default value
+    monkeypatch.setattr("click.prompt", lambda *args, **kw: kw["default"] if "default" in kw else "")
+    monkeypatch.setattr("builtins.input", lambda *args, **kw: "")
+
+    with patch("sys.argv", ["prog", "create"]):
+        with pytest.raises(SystemExit):
+            _entrypoint()
+
+    out, _ = capsys.readouterr()
+    assert '"taipy_application" directory already exists' in out

+ 16 - 16
tools/packages/pipfiles/Pipfile3.10.max

@@ -50,34 +50,34 @@ version = "==4.2.13"
 
 
 [packages]
-"pyarrow" = {version="==15.0.0"}
-"networkx" = {version="==3.2.1", markers="python_version>'3.8'"}
+"pyarrow" = {version="==16.0.0"}
+"networkx" = {version="==3.3", markers="python_version>'3.8'"}
 "openpyxl" = {version="==3.1.2"}
-"pandas" = {version="==2.2.0", markers="python_version>'3.8'"}
-"pymongo" = {version="==4.6.1", extras=["srv"]}
-"sqlalchemy" = {version="==2.0.25"}
+"pandas" = {version="==2.2.2", markers="python_version>'3.8'"}
+"pymongo" = {version="==4.7.0", extras=["srv"]}
+"sqlalchemy" = {version="==2.0.29"}
 "toml" = {version="==0.10.2"}
-"boto3" = {version="==1.34.25"}
+"boto3" = {version="==1.34.93"}
 "backports.zoneinfo" = {version="==0.2.1", markers="python_version<'3.9'"}
-"cookiecutter" = {version="==2.5.0"}
-"flask" = {version="==3.0.1"}
+"cookiecutter" = {version="==2.6.0"}
+"flask" = {version="==3.0.3"}
 "flask-cors" = {version="==4.0.0"}
 "flask-socketio" = {version="==5.3.6"}
-"markdown" = {version="==3.5.2"}
+"markdown" = {version="==3.6"}
 "python-dotenv" = {version="==1.0.1"}
-"pytz" = {version="==2023.3.post1"}
+"pytz" = {version="==2024.1"}
 "tzlocal" = {version="==5.2"}
-"gevent" = {version="==23.9.1"}
+"gevent" = {version="==24.2.1"}
 "gevent-websocket" = {version="==0.10.1"}
 "kthread" = {version="==0.2.3"}
 "gitignore-parser" = {version="==0.1.11"}
 "simple-websocket" = {version="==1.0.0"}
-"twisted" = {version="==23.10.0"}
-"deepdiff" = {version="==6.7.1"}
+"twisted" = {version="==24.3.0"}
+"deepdiff" = {version="==7.0.1"}
 "flask-restful" = {version="==0.3.10"}
 "passlib" = {version="==1.7.4"}
-"marshmallow" = {version="==3.20.2"}
-"apispec" = {version="==6.4.0", extras=["yaml"]}
-"apispec-webframeworks" = {version="==1.0.0"}
+"marshmallow" = {version="==3.21.1"}
+"apispec" = {version="==6.6.1", extras=["yaml"]}
+"apispec-webframeworks" = {version="==1.1.0"}
 "watchdog" = {version="==4.0.0"}
 "charset-normalizer" = {version="==3.3.2"}

+ 16 - 16
tools/packages/pipfiles/Pipfile3.11.max

@@ -50,34 +50,34 @@ version = "==4.2.13"
 
 
 [packages]
-"pyarrow" = {version="==15.0.0"}
-"networkx" = {version="==3.2.1", markers="python_version>'3.8'"}
+"pyarrow" = {version="==16.0.0"}
+"networkx" = {version="==3.3", markers="python_version>'3.8'"}
 "openpyxl" = {version="==3.1.2"}
-"pandas" = {version="==2.2.0", markers="python_version>'3.8'"}
-"pymongo" = {version="==4.6.1", extras=["srv"]}
-"sqlalchemy" = {version="==2.0.25"}
+"pandas" = {version="==2.2.2", markers="python_version>'3.8'"}
+"pymongo" = {version="==4.7.0", extras=["srv"]}
+"sqlalchemy" = {version="==2.0.29"}
 "toml" = {version="==0.10.2"}
-"boto3" = {version="==1.34.25"}
+"boto3" = {version="==1.34.93"}
 "backports.zoneinfo" = {version="==0.2.1", markers="python_version<'3.9'"}
-"cookiecutter" = {version="==2.5.0"}
-"flask" = {version="==3.0.1"}
+"cookiecutter" = {version="==2.6.0"}
+"flask" = {version="==3.0.3"}
 "flask-cors" = {version="==4.0.0"}
 "flask-socketio" = {version="==5.3.6"}
-"markdown" = {version="==3.5.2"}
+"markdown" = {version="==3.6"}
 "python-dotenv" = {version="==1.0.1"}
-"pytz" = {version="==2023.3.post1"}
+"pytz" = {version="==2024.1"}
 "tzlocal" = {version="==5.2"}
-"gevent" = {version="==23.9.1"}
+"gevent" = {version="==24.2.1"}
 "gevent-websocket" = {version="==0.10.1"}
 "kthread" = {version="==0.2.3"}
 "gitignore-parser" = {version="==0.1.11"}
 "simple-websocket" = {version="==1.0.0"}
-"twisted" = {version="==23.10.0"}
-"deepdiff" = {version="==6.7.1"}
+"twisted" = {version="==24.3.0"}
+"deepdiff" = {version="==7.0.1"}
 "flask-restful" = {version="==0.3.10"}
 "passlib" = {version="==1.7.4"}
-"marshmallow" = {version="==3.20.2"}
-"apispec" = {version="==6.4.0", extras=["yaml"]}
-"apispec-webframeworks" = {version="==1.0.0"}
+"marshmallow" = {version="==3.21.1"}
+"apispec" = {version="==6.6.1", extras=["yaml"]}
+"apispec-webframeworks" = {version="==1.1.0"}
 "watchdog" = {version="==4.0.0"}
 "charset-normalizer" = {version="==3.3.2"}

Some files were not shown because too many files changed in this diff