
Merge branch 'develop' of https://github.com/Avaiga/taipy into fix/remove-C405-rule

ooooo 1 year ago
parent
commit
bd939cfe27
100 changed files with 2480 additions and 1712 deletions
  1. +0 -171 .github/workflows/build-and-release-prod.yml
  2. +78 -36 .github/workflows/build-and-release.yml
  3. +31 -10 .github/workflows/overall-tests.yml
  4. +1 -1 .github/workflows/publish.yml
  5. +1 -1 .github/workflows/sync-project-issue-state.yml
  6. +1 -1 doc/gui/examples/charts/treemap-simple.py
  7. +4 -0 frontend/taipy-gui/base/src/app.ts
  8. +1 -0 frontend/taipy-gui/base/src/index.ts
  9. +1 -1 frontend/taipy-gui/dom/package.json
  10. +275 -234 frontend/taipy-gui/package-lock.json
  11. +1 -1 frontend/taipy-gui/package.json
  12. +1 -1 frontend/taipy-gui/packaging/package.json
  13. +14 -0 frontend/taipy-gui/public/stylekit/controls/chart.css
  14. +33 -6 frontend/taipy-gui/src/components/Taipy/AutoLoadingTable.tsx
  15. +92 -46 frontend/taipy-gui/src/components/Taipy/Chart.tsx
  16. +5 -3 frontend/taipy-gui/src/components/Taipy/FileSelector.tsx
  17. +8 -0 frontend/taipy-gui/src/components/Taipy/Navigate.tsx
  18. +66 -0 frontend/taipy-gui/src/components/Taipy/PaginatedTable.spec.tsx
  19. +30 -3 frontend/taipy-gui/src/components/Taipy/PaginatedTable.tsx
  20. +99 -60 frontend/taipy-gui/src/components/Taipy/tableUtils.tsx
  21. +168 -135 frontend/taipy/package-lock.json
  22. +1 -1 frontend/taipy/package.json
  23. +3 -6 frontend/taipy/src/DataNodeViewer.tsx
  24. +8 -3 frontend/taipy/src/ScenarioDag.tsx
  25. +4 -0 frontend/taipy/src/projectstorm/NodeWidget.tsx
  26. +13 -4 frontend/taipy/src/projectstorm/models.ts
  27. +4 -3 frontend/taipy/src/utils/diagram.ts
  28. +4 -0 frontend/taipy/src/utils/types.ts
  29. +0 -2 pyproject.toml
  30. +1 -1 taipy/config/version.json
  31. +9 -5 taipy/core/_core.py
  32. +2 -3 taipy/core/_core_cli.py
  33. +4 -8 taipy/core/_entity/_dag.py
  34. +1 -1 taipy/core/_entity/_entity.py
  35. +5 -11 taipy/core/_entity/_labeled.py
  36. +2 -2 taipy/core/_entity/_migrate/_utils.py
  37. +1 -1 taipy/core/_entity/_migrate_cli.py
  38. +5 -8 taipy/core/_entity/_reload.py
  39. +3 -4 taipy/core/_entity/submittable.py
  40. +10 -0 taipy/core/_orchestrator/_abstract_orchestrator.py
  41. +3 -0 taipy/core/_orchestrator/_dispatcher/_development_job_dispatcher.py
  42. +13 -18 taipy/core/_orchestrator/_dispatcher/_job_dispatcher.py
  43. +5 -3 taipy/core/_orchestrator/_dispatcher/_standalone_job_dispatcher.py
  44. +1 -2 taipy/core/_orchestrator/_dispatcher/_task_function_wrapper.py
  45. +23 -23 taipy/core/_orchestrator/_orchestrator.py
  46. +1 -2 taipy/core/_repository/_base_taipy_model.py
  47. +4 -5 taipy/core/_repository/_encoder.py
  48. +2 -6 taipy/core/_repository/_filesystem_repository.py
  49. +2 -1 taipy/core/data/_abstract_sql.py
  50. +9 -6 taipy/core/data/parquet.py
  51. +8 -38 taipy/core/data/sql_table.py
  52. +0 -3 taipy/core/job/_job_manager.py
  53. +16 -13 taipy/core/scenario/scenario.py
  54. +2 -0 taipy/core/setup.py
  55. +1 -1 taipy/core/version.json
  56. +2 -0 taipy/gui/_renderers/builder.py
  57. +2 -0 taipy/gui/_renderers/factory.py
  58. +4 -1 taipy/gui/_renderers/json.py
  59. +8 -1 taipy/gui/custom/_page.py
  60. +6 -1 taipy/gui/data/pandas_data_accessor.py
  61. +15 -8 taipy/gui/data/utils.py
  62. +64 -11 taipy/gui/gui.py
  63. +3 -0 taipy/gui/page.py
  64. +1 -0 taipy/gui/server.py
  65. +2 -0 taipy/gui/setup.py
  66. +1 -1 taipy/gui/version.json
  67. +12 -1 taipy/gui/viselements.json
  68. +2 -0 taipy/gui_core/_GuiCoreLib.py
  69. +21 -2 taipy/gui_core/_context.py
  70. +6 -0 taipy/gui_core/viselements.json
  71. +2 -0 taipy/rest/setup.py
  72. +1 -1 taipy/rest/version.json
  73. +1 -1 taipy/templates/version.json
  74. +1 -1 taipy/version.json
  75. +2 -0 tests/core/_entity/test_migrate_cli.py
  76. +2 -10 tests/core/_orchestrator/_dispatcher/mock_standalone_dispatcher.py
  77. +6 -0 tests/core/_orchestrator/_dispatcher/test_development_job_dispatcher.py
  78. +0 -12 tests/core/_orchestrator/_dispatcher/test_dispatcher__execute_job.py
  79. +12 -9 tests/core/_orchestrator/_dispatcher/test_standalone_job_dispatcher.py
  80. +0 -3 tests/core/_orchestrator/_dispatcher/test_task_function_wrapper.py
  81. +28 -40 tests/core/_orchestrator/test_orchestrator.py
  82. +4 -3 tests/core/_orchestrator/test_orchestrator__cancel_jobs.py
  83. +3 -6 tests/core/config/test_core_version.py
  84. +1 -1 tests/core/data/test_filter_data_node.py
  85. +139 -0 tests/core/data/test_filter_parquet_data_node.py
  86. +207 -0 tests/core/data/test_filter_sql_table_data_node.py
  87. +2 -2 tests/core/data/test_generic_data_node.py
  88. +0 -327 tests/core/data/test_parquet_data_node.py
  89. +188 -0 tests/core/data/test_read_parquet_data_node.py
  90. +176 -0 tests/core/data/test_read_sql_table_data_node.py
  91. +1 -326 tests/core/data/test_sql_table_data_node.py
  92. +236 -0 tests/core/data/test_write_parquet_data_node.py
  93. +186 -0 tests/core/data/test_write_sql_table_data_node.py
  94. +29 -32 tests/core/job/test_job_manager.py
  95. +8 -9 tests/core/job/test_job_manager_with_sql_repo.py
  96. +1 -1 tests/core/scenario/test_scenario_manager.py
  97. +1 -1 tests/core/scenario/test_scenario_manager_with_sql_repo.py
  98. +3 -3 tools/gui/generate_pyi.py
  99. +2 -0 tools/packages/taipy-config/MANIFEST.in
  100. +9 -4 tools/packages/taipy-config/setup.py

+ 0 - 171
.github/workflows/build-and-release-prod.yml

@@ -1,171 +0,0 @@
-name: Build a prod version for all packages and release them
-
-on:
-  workflow_dispatch:
-
-jobs:
-  fetch-versions:
-    runs-on: ubuntu-latest
-    outputs:
-        config_VERSION: ${{ steps.version-setup.outputs.config_VERSION }}
-        core_VERSION: ${{ steps.version-setup.outputs.core_VERSION }}
-        gui_VERSION: ${{ steps.version-setup.outputs.gui_VERSION }}
-        rest_VERSION: ${{ steps.version-setup.outputs.rest_VERSION }}
-        templates_VERSION: ${{ steps.version-setup.outputs.templates_VERSION }}
-        VERSION: ${{ steps.version-setup.outputs.VERSION }}
-        NEW_VERSION: ${{ steps.version-setup.outputs.NEW_VERSION }}
-    steps:
-      - uses: actions/checkout@v4
-      - name: Setup Dev Version
-        id: version-setup
-        run: |
-          python tools/release/setup_version.py ALL production >> $GITHUB_OUTPUT
-
-  build-and-release-taipy-packages:
-    needs: [fetch-versions]
-    timeout-minutes: 20
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        package: [config, core, gui, rest, templates]
-      max-parallel: 1
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          ssh-key: ${{secrets.DEPLOY_KEY}}
-      - uses: actions/setup-python@v4
-        with:
-          python-version: 3.9
-      - uses: actions/setup-node@v4
-        with:
-          node-version: '20'
-
-      - name: Extract commit hash
-        shell: bash
-        run: echo "HASH=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
-        id: extract_hash
-
-      - name: Set Build Variables
-        id: set-variables
-        run: |
-          if [ "${{ matrix.package }}" == "config" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.config_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/config" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.config_VERSION}}-config" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-config-${{needs.fetch-versions.outputs.config_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
-          elif [ "${{ matrix.package }}" == "core" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.core_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/core" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.core_VERSION}}-core" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-core-${{needs.fetch-versions.outputs.core_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
-          elif [ "${{ matrix.package }}" == "gui" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.gui_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/gui" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.gui_VERSION}}-gui" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-gui-${{needs.fetch-versions.outputs.gui_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
-          elif [ "${{ matrix.package }}" == "rest" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.rest_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/rest" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.rest_VERSION}}-rest" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-rest-${{needs.fetch-versions.outputs.rest_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
-          elif [ "${{ matrix.package }}" == "templates" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.templates_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/templates" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.templates_VERSION}}-templates" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-templates-${{needs.fetch-versions.outputs.templates_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
-          fi
-        shell: bash
-
-      - name: Update setup.requirements.txt
-        run: |
-          python tools/release/update_setup_requirements.py taipy-${{ matrix.package }} \
-            ${{needs.fetch-versions.outputs.config_VERSION}} \
-            ${{needs.fetch-versions.outputs.core_VERSION}} \
-            ${{needs.fetch-versions.outputs.gui_VERSION}} \
-            ${{needs.fetch-versions.outputs.rest_VERSION}} \
-            ${{needs.fetch-versions.outputs.templates_VERSION}} \
-            dev
-
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install build wheel
-
-      - name: Build package
-        working-directory: ${{ steps.set-variables.outputs.package_dir }}
-        run: python setup.py build_py && python -m build
-
-      - name: Create tag and release
-        working-directory: ${{ steps.set-variables.outputs.package_dir }}
-        run: |
-          gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --title ${{ steps.set-variables.outputs.release_name }} --notes "Release ${{ steps.set-variables.outputs.release_name }}"
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Install Package
-        working-directory: ${{ steps.set-variables.outputs.package_dir }}
-        run: |
-          pip install ${{ steps.set-variables.outputs.tar_path }}
-
-  build-and-release-taipy:
-    runs-on: ubuntu-latest
-    needs: [ build-and-release-taipy-dev-packages, fetch-versions ]
-    timeout-minutes: 20
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          ssh-key: ${{secrets.DEPLOY_KEY}}
-      - name: Extract commit hash
-        shell: bash
-        run: echo "HASH=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
-        id: extract_hash
-
-      - name: Set Build Variables
-        id: set-variables
-        run: |
-          echo "package_version=${{needs.fetch-versions.outputs.VERSION}}" >> $GITHUB_OUTPUT
-          echo "release_name=${{needs.fetch-versions.outputs.VERSION}}" >> $GITHUB_OUTPUT
-          echo "tar_path=./dist/${{ github.event.repository.name }}-${{needs.fetch-versions.outputs.VERSION}}.tar.gz" >> $GITHUB_OUTPUT
-
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install build wheel
-
-      - name: Build Taipy package
-        run: python setup.py build_py && python -m build
-
-      - name: Create tag and release Taipy
-        run: |
-          gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --title ${{ steps.set-variables.outputs.release_name }} --notes "Release ${{ steps.set-variables.outputs.release_name }}"
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Install Taipy
-        run: |
-          pip install ${{ steps.set-variables.outputs.tar_path }}
-
-      - name: Check Taipy Installation
-        run: |
-          python tools/validate_taipy_install.py
-
-      - name: Download packages
-        run: |
-          gh release download ${{ needs.fetch-versions.outputs.config_VERSION }}-config --skip-existing --dir dist
-          gh release download ${{ needs.fetch-versions.outputs.core_VERSION }}-core --skip-existing --dir dist
-          gh release download ${{ needs.fetch-versions.outputs.gui_VERSION }}-gui --skip-existing --dir dist
-          gh release download ${{ needs.fetch-versions.outputs.rest_VERSION }}-rest --skip-existing --dir dist
-          gh release download ${{ needs.fetch-versions.outputs.templates_VERSION }}-templates --skip-existing --dir dist
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Bundle all packages in main release tag
-        run: |
-          find dist -type f -print0 | xargs -r0 gh release upload ${{ needs.fetch-versions.outputs.VERSION }} --clobber
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Reset changes
-        run: |
-          git reset --hard HEAD
-          git clean -fdx

+ 78 - 36
.github/workflows/build-and-release-dev.yml → .github/workflows/build-and-release.yml

@@ -1,12 +1,19 @@
-name: Build a dev version for all packages and release them
+name: Build all taipy packages and release them
 
 on:
   workflow_dispatch:
     inputs:
-      publish_on_pypi:
-        description: "Should the packages be published on Pypi?"
+      internal_dep_on_pypi:
+        description: "Point taipy internal dependencies to Pypi? If false it will point to the github .tar.gz release file"
         default: "false"
         required: true
+      release_type:
+        description: "The type of release to be made (dev or production)"
+        default: "dev"
+        required: true
+      target_version:
+        description: "The version of the package to be released"
+        required: true
 
 jobs:
   fetch-versions:
@@ -21,12 +28,17 @@ jobs:
         NEW_VERSION: ${{ steps.version-setup.outputs.NEW_VERSION }}
     steps:
       - uses: actions/checkout@v4
-      - name: Setup Dev Version
+      - name: Extract branch name
+        shell: bash
+        run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
+        id: extract_branch
+
+      - name: Setup Version
         id: version-setup
         run: |
-          python tools/release/setup_version.py ALL dev >> $GITHUB_OUTPUT
+          python tools/release/setup_version.py ALL ${{ github.event.inputs.release_type }} ${{ github.event.inputs.target_version }} ${{ steps.extract_branch.outputs.branch }} >> $GITHUB_OUTPUT
 
-  build-and-release-taipy-dev-packages:
+  build-and-release-taipy-packages:
     needs: [fetch-versions]
     timeout-minutes: 20
     runs-on: ubuntu-latest
@@ -89,7 +101,7 @@ jobs:
             ${{needs.fetch-versions.outputs.gui_VERSION}} \
             ${{needs.fetch-versions.outputs.rest_VERSION}} \
             ${{needs.fetch-versions.outputs.templates_VERSION}} \
-            ${{ github.event.inputs.publish_on_pypi }}
+            ${{ github.event.inputs.internal_dep_on_pypi }}
 
       - name: Copy tools
         run: |
@@ -98,7 +110,40 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install build wheel
+          pip install build wheel pipenv mypy black isort
+
+      - name: Install GUI dependencies
+        if: matrix.package == 'gui'
+        run: |
+          pipenv install --dev
+
+      - name: Generate GUI pyi file
+        if: matrix.package == 'gui'
+        run: |
+          cp tools/gui/generate_pyi.py pyi_temp.py && pipenv run python pyi_temp.py && rm pyi_temp.py
+
+      - name: Build frontends
+        if: matrix.package == 'gui'
+        run: |
+          python tools/frontend/bundle_build.py
+
+      - name: Copy files from tools
+        run: |
+          cp -r tools/packages/taipy-${{matrix.package}}/. ${{ steps.set-variables.outputs.package_dir }}
+
+      - name: Build Package Structure
+        working-directory: ${{ steps.set-variables.outputs.package_dir }}
+        run: |
+          python tools/release/build_package_structure.py ${{ matrix.package }}
+
+      - name: Copy Taipy Logger
+        if: matrix.package == 'config'
+        run: |
+          cp -r taipy/logger/. ${{ steps.set-variables.outputs.package_dir }}/taipy/logger
+
+      - name: Copy _cli folder
+        run: |
+          cp -r taipy/_cli/. ${{ steps.set-variables.outputs.package_dir }}/taipy/_cli
 
       - name: Build package
         working-directory: ${{ steps.set-variables.outputs.package_dir }}
@@ -108,24 +153,18 @@ jobs:
       - name: Create tag and release
         working-directory: ${{ steps.set-variables.outputs.package_dir }}
         run: |
-          gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --prerelease --title ${{ steps.set-variables.outputs.release_name }} --notes "Release Draft ${{ steps.set-variables.outputs.release_name }}"
+           if [ "${{ github.event.inputs.release_type }}" == "dev" ]; then
+            gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --prerelease --title ${{ steps.set-variables.outputs.release_name }} --notes "Release Draft ${{ steps.set-variables.outputs.release_name }}"
+           else
+            gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --title ${{ steps.set-variables.outputs.release_name }} --notes "Release ${{ steps.set-variables.outputs.release_name }}"
+           fi
+        shell: bash
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Install Package
-        working-directory: ${{ steps.set-variables.outputs.package_dir }}
-        run: |
-          pip install ${{ steps.set-variables.outputs.tar_path }}
-
-      - name: Publish to PyPI
-        if: github.event.inputs.publish_on_pypi == 'true'
-        uses: pypa/gh-action-pypi-publish@release/v1
-
-  build-and-release-taipy-dev:
-    permissions:
-      id-token: write  # IMPORTANT: this permission is mandatory for trusted publishing
+  build-and-release-taipy:
     runs-on: ubuntu-latest
-    needs: [ build-and-release-taipy-dev-packages, fetch-versions ]
+    needs: [build-and-release-taipy-packages, fetch-versions ]
     timeout-minutes: 20
     steps:
       - uses: actions/checkout@v4
@@ -151,33 +190,36 @@ jobs:
             ${{needs.fetch-versions.outputs.gui_VERSION}} \
             ${{needs.fetch-versions.outputs.rest_VERSION}} \
             ${{needs.fetch-versions.outputs.templates_VERSION}} \
-            ${{ github.event.inputs.publish_on_pypi }}
+            ${{ github.event.inputs.internal_dep_on_pypi }}
 
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
           pip install build wheel
 
-      - name: Build Taipy package
-        run: python setup.py build_py && python -m build
 
-      - name: Create tag and release Taipy
+      - name: Backup setup.py
         run: |
-          gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --prerelease --title ${{ steps.set-variables.outputs.release_name }} --notes "Release Draft ${{ steps.set-variables.outputs.release_name }}"
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          mv setup.py setup.old.py
 
-      - name: Install Taipy
+      - name: Copy files from tools
         run: |
-          pip install ${{ steps.set-variables.outputs.tar_path }}
+          cp -r tools/packages/taipy/. .
 
-      - name: Check Taipy Installation
+      - name: Build Taipy package
         run: |
-          python tools/validate_taipy_install.py
+          python setup.py build_py && python -m build
 
-      - name: Publish to PyPI
-        if: github.event.inputs.publish_on_pypi == 'true'
-        uses: pypa/gh-action-pypi-publish@release/v1
+      - name: Create tag and release Taipy
+        run: |
+          if [ "${{ github.event.inputs.release_type }}" == "dev" ]; then
+            gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --prerelease --title ${{ steps.set-variables.outputs.release_name }} --notes "Release Draft ${{ steps.set-variables.outputs.release_name }}"
+          else
+            gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --title ${{ steps.set-variables.outputs.release_name }} --notes "Release ${{ steps.set-variables.outputs.release_name }}"
+          fi
+        shell: bash
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Download packages
         run: |

+ 31 - 10
.github/workflows/overall-tests.yml

@@ -11,8 +11,35 @@ jobs:
   partial-tests:
     uses: ./.github/workflows/partial-tests.yml
 
+  coverage:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.11'
+
+      - name: Install dependencies
+        id: install-dependencies
+        uses: ./.github/actions/install
+        with:
+          python-version: '3.11'
+          os: 'ubuntu-latest'
+          pipfile-version: 'min'
+
+      - name: Pytest
+        run: |
+          pipenv run pytest --cov=taipy --cov-report="xml:overall-coverage.xml" tests
+
+      - name: Coverage
+        uses: orgoro/coverage@v3.1
+        with:
+          coverageFile: overall-coverage.xml
+          token: ${{ secrets.GITHUB_TOKEN }}
+
   overall-tests:
-    needs: [partial-tests]
+    needs: [coverage, partial-tests]
     timeout-minutes: 50
     strategy:
       fail-fast: false
@@ -37,17 +64,11 @@ jobs:
           pipfile-version: ${{ matrix.pipfile-version }}
 
       - name: Pytest
-        run: pipenv run pytest -m "not orchestrator_dispatcher and not standalone and not teste2e" --cov=taipy --cov-append --cov-report="xml:overall-coverage.xml" --cov-report term-missing tests
-
-      - name: Coverage
-        if: matrix.os == 'ubuntu-latest' && matrix.python-version == '3.11' && matrix.pipfile-version == 'min'
-        uses: MishaKav/pytest-coverage-comment@main
-        with:
-          pytest-xml-coverage-path: ./overall-coverage.xml
-          title: Taipy Overall Coverage Report
+        run: |
+          pipenv run pytest -m "not orchestrator_dispatcher and not standalone and not teste2e" tests
 
   intermittent-tests:
-    needs: [partial-tests]
+    needs: [coverage, partial-tests]
     timeout-minutes: 40
     strategy:
       fail-fast: false

+ 1 - 1
.github/workflows/publish.yml

@@ -71,7 +71,7 @@ jobs:
   publish-main-package-to-pypi:
     permissions:
       id-token: write  # IMPORTANT: this permission is mandatory for trusted publishing
-    needs: [ test-package ]
+    needs: [publish-subpackages-to-pypi, test-package ]
     timeout-minutes: 20
     environment: publish
     runs-on: ubuntu-latest

+ 1 - 1
.github/workflows/sync-project-issue-state.yml

@@ -14,7 +14,7 @@ jobs:
 
     steps:
       - name: Sync issue states
-        uses: dasmerlon/project-issue-state-sync@v2
+        uses: dasmerlon/project-issue-state-sync@v2.0.1
         with:
           # The secrets.PROJECT_ISSUE_SYNC_TOKEN is a fine-grained PAT with the following permissions:
           #   - Repo

+ 1 - 1
doc/gui/examples/charts/treemap-simple.py

@@ -21,7 +21,7 @@ fibonacci = [0, 1]
 for i in range(2, n_numbers):
     fibonacci.append(fibonacci[i - 1] + fibonacci[i - 2])
 
-data = {"index": [i for i in range(1, n_numbers + 1)], "fibonacci": fibonacci}
+data = {"index": list(range(1, n_numbers + 1)), "fibonacci": fibonacci}
 
 page = """
 # TreeMap - Simple

+ 4 - 0
frontend/taipy-gui/base/src/app.ts

@@ -141,6 +141,10 @@ export class TaipyApp {
     upload(encodedName: string, files: FileList, progressCallback: (val: number) => void) {
         return uploadFile(encodedName, files, progressCallback, this.clientId);
     }
+
+    getPageMetadata() {
+        return JSON.parse(localStorage.getItem("tp_cp_meta") || "{}");
+    }
 }
 
 export const createApp = (onInit?: OnInitHandler, onChange?: OnChangeHandler, path?: string, socket?: Socket) => {

+ 1 - 0
frontend/taipy-gui/base/src/index.ts

@@ -7,4 +7,5 @@ export type { OnChangeHandler, OnInitHandler, ModuleData };
 
 window.addEventListener("beforeunload", () => {
     document.cookie = "tprh=;path=/;Max-Age=-99999999;";
+    localStorage.removeItem("tp_cp_meta");
 });
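
The two base-package hunks above (app.ts and index.ts) add a getPageMetadata() helper that parses the "tp_cp_meta" entry written to localStorage by Navigate.tsx and clear that entry again on page unload. A minimal usage sketch, assuming the base bundle re-exports createApp as declared in app.ts; the import path and logging are illustrative, not part of this commit:

// Illustrative only: the import path is an assumption, not defined by this diff.
import { createApp } from "./app";

const app = createApp(() => {
    // getPageMetadata() (added above) returns the parsed "tp_cp_meta" payload,
    // or an empty object when Navigate.tsx has stored nothing.
    const meta: Record<string, unknown> = app.getPageMetadata();
    console.log("custom page metadata:", meta);
});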

+ 1 - 1
frontend/taipy-gui/dom/package.json

@@ -1,6 +1,6 @@
 {
   "name": "taipy-gui-dom",
-  "version": "3.1.0",
+  "version": "3.2.0",
   "private": true,
   "dependencies": {
     "react": "^18.2.0",

The diff for this file is not shown because it is too large
+ 275 - 234
frontend/taipy-gui/package-lock.json


+ 1 - 1
frontend/taipy-gui/package.json

@@ -1,6 +1,6 @@
 {
   "name": "taipy-gui",
-  "version": "3.1.0",
+  "version": "3.2.0",
   "private": true,
   "dependencies": {
     "@emotion/react": "^11.10.0",

+ 1 - 1
frontend/taipy-gui/packaging/package.json

@@ -1,6 +1,6 @@
 {
   "name": "taipy-gui",
-  "version": "3.1.0",
+  "version": "3.2.0",
   "private": true,
   "main": "./taipy-gui.js",
   "types": "./taipy-gui.d.ts"

+ 14 - 0
frontend/taipy-gui/public/stylekit/controls/chart.css

@@ -41,3 +41,17 @@
 .taipy-chart:not(.has-background) .main-svg {
   background: transparent !important;
 }
+
+.js-plotly-plot.full-screen {
+    position: fixed !important;
+    height: 99vh !important;
+    width: 99vw !important;
+    display: block !important;
+    left: 0;
+    top: 0;
+    z-index: 99999;
+    overflow: hidden;
+    background-color: var(--color-background);
+    box-shadow: 10px 5px 5px var(--color-contrast);
+    transition: left 1s ease, width 1s ease, top 1s ease, height 1s ease;
+}

+ 33 - 6
frontend/taipy-gui/src/components/Taipy/AutoLoadingTable.tsx

@@ -30,6 +30,7 @@ import Tooltip from "@mui/material/Tooltip";
 import AddIcon from "@mui/icons-material/Add";
 import DataSaverOn from "@mui/icons-material/DataSaverOn";
 import DataSaverOff from "@mui/icons-material/DataSaverOff";
+import Download from "@mui/icons-material/Download";
 
 import {
     createRequestInfiniteTableUpdateAction,
@@ -61,6 +62,7 @@ import {
     getTooltip,
     defaultColumns,
     OnRowClick,
+    DownloadAction,
 } from "./tableUtils";
 import {
     useClassNames,
@@ -108,7 +110,7 @@ const Row = ({
         onRowSelection,
         onRowClick,
         lineStyle,
-        nanValue,
+        nanValue
     },
 }: {
     index: number;
@@ -181,6 +183,7 @@ const AutoLoadingTable = (props: TaipyTableProps) => {
         onAction = "",
         size = DEFAULT_SIZE,
         userData,
+        downloadable = false,
     } = props;
     const [rows, setRows] = useState<RowType[]>([]);
     const [rowCount, setRowCount] = useState(1000); // need something > 0 to bootstrap the infinite loader
@@ -274,7 +277,7 @@ const AutoLoadingTable = (props: TaipyTableProps) => {
                         col.tooltip = props.tooltip;
                     }
                 });
-                addDeleteColumn((active && (onAdd || onDelete) ? 1 : 0) + (active && filter ? 1 : 0), baseColumns);
+                addDeleteColumn((active && (onAdd || onDelete) ? 1 : 0) + (active && filter ? 1 : 0) + (active && downloadable ? 1 : 0), baseColumns);
                 const colsOrder = Object.keys(baseColumns).sort(getsortByIndex(baseColumns));
                 const styTt = colsOrder.reduce<Record<string, Record<string, string>>>((pv, col) => {
                     if (baseColumns[col].style) {
@@ -305,7 +308,7 @@ const AutoLoadingTable = (props: TaipyTableProps) => {
             hNan,
             false,
         ];
-    }, [active, editable, onAdd, onDelete, baseColumns, props.lineStyle, props.tooltip, props.nanValue, props.filter]);
+    }, [active, editable, onAdd, onDelete, baseColumns, props.lineStyle, props.tooltip, props.nanValue, props.filter, downloadable]);
 
     const boxBodySx = useMemo(() => ({ height: height }), [height]);
 
@@ -405,6 +408,17 @@ const AutoLoadingTable = (props: TaipyTableProps) => {
         [visibleStartIndex, dispatch, updateVarName, onAdd, module, userData]
     );
 
+    const onDownload = useCallback(
+        () =>
+            dispatch(
+                createSendActionNameAction(updateVarName, module, {
+                    action: DownloadAction,
+                    user_data: userData,
+                })
+            ),
+        [dispatch, updateVarName, module, userData]
+    );
+
     const isItemLoaded = useCallback((index: number) => index < rows.length && !!rows[index], [rows]);
 
     const onCellValidation: OnCellValidation = useCallback(
@@ -436,12 +450,14 @@ const AutoLoadingTable = (props: TaipyTableProps) => {
     );
 
     const onRowSelection: OnRowSelection = useCallback(
-        (rowIndex: number, colName?: string) =>
+        (rowIndex: number, colName?: string, value?: string) =>
             dispatch(
                 createSendActionNameAction(updateVarName, module, {
                     action: onAction,
                     index: getRowIndex(rows[rowIndex], rowIndex),
                     col: colName === undefined ? null : colName,
+                    value,
+                    reason: value === undefined ? "click": "button",
                     user_data: userData,
                 })
             ),
@@ -488,7 +504,7 @@ const AutoLoadingTable = (props: TaipyTableProps) => {
             onRowSelection: active && onAction ? onRowSelection : undefined,
             onRowClick: active && onAction ? onRowClick : undefined,
             lineStyle: props.lineStyle,
-            nanValue: props.nanValue,
+            nanValue: props.nanValue
         }),
         [
             rows,
@@ -507,7 +523,7 @@ const AutoLoadingTable = (props: TaipyTableProps) => {
             onRowClick,
             props.lineStyle,
             props.nanValue,
-            size,
+            size
         ]
     );
 
@@ -550,6 +566,17 @@ const AutoLoadingTable = (props: TaipyTableProps) => {
                                                             className={className}
                                                         />
                                                     ) : null,
+                                                    active && downloadable ? (
+                                                        <Tooltip title="Download as CSV" key="downloadCsv">
+                                                            <IconButton
+                                                                onClick={onDownload}
+                                                                size="small"
+                                                                sx={iconInRowSx}
+                                                            >
+                                                                <Download fontSize="inherit" />
+                                                            </IconButton>
+                                                        </Tooltip>
+                                                    ) : null,
                                                 ]
                                             ) : (
                                                 <TableSortLabel
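
The AutoLoadingTable changes above wire the new downloadable prop to a "Download as CSV" toolbar button and extend the row-selection payload with value and reason fields. A sketch of the two payload shapes passed to createSendActionNameAction; only DownloadAction and the field names come from the diff, the concrete values are illustrative assumptions:

// Shapes only; values are placeholders.
const DownloadAction = "__Taipy__download_csv"; // exported by tableUtils.tsx below

const downloadPayload = {
    action: DownloadAction,        // asks the backend to export the table as CSV
    user_data: { origin: "demo" }, // forwarded untouched to the user callback
};

const rowSelectionPayload = {
    action: "onSelect",
    index: 3,
    col: "Code",
    value: "button action",                 // present only for in-cell button clicks
    reason: "button" as "click" | "button", // "click" for plain cell selection
    user_data: { origin: "demo" },
};

console.log(downloadPayload, rowSelectionPayload);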

+ 92 - 46
frontend/taipy-gui/src/components/Taipy/Chart.tsx

@@ -11,8 +11,27 @@
  * specific language governing permissions and limitations under the License.
  */
 
-import React, { CSSProperties, useCallback, useEffect, useMemo, useRef, useState, lazy, Suspense } from "react";
-import { Data, Layout, PlotDatum, PlotMarker, PlotRelayoutEvent, PlotSelectionEvent, ScatterLine } from "plotly.js";
+import React, {
+    CSSProperties,
+    useCallback,
+    useEffect,
+    useMemo,
+    useRef,
+    useState,
+    lazy,
+    Suspense,
+} from "react";
+import {
+    Config,
+    Data,
+    Layout,
+    ModeBarButtonAny,
+    PlotDatum,
+    PlotMarker,
+    PlotRelayoutEvent,
+    PlotSelectionEvent,
+    ScatterLine,
+} from "plotly.js";
 import Skeleton from "@mui/material/Skeleton";
 import Box from "@mui/material/Box";
 import Tooltip from "@mui/material/Tooltip";
@@ -85,9 +104,11 @@ const defaultStyle = { position: "relative", display: "inline-block" };
 const indexedData = /^(\d+)\/(.*)/;
 
 const getColNameFromIndexed = (colName: string): string => {
-    const reRes = indexedData.exec(colName);
-    if (reRes && reRes.length > 2) {
-        return reRes[2] || colName;
+    if (colName) {
+        const reRes = indexedData.exec(colName);
+        if (reRes && reRes.length > 2) {
+            return reRes[2] || colName;
+        }
     }
     return colName;
 };
@@ -196,6 +217,29 @@ const defaultConfig = {
 const emptyLayout = {} as Record<string, Record<string, unknown>>;
 const emptyData = {} as Record<string, TraceValueType>;
 
+const TaipyPlotlyButtons: ModeBarButtonAny[] = [
+    {
+        name: "Full screen",
+        title: "Full screen",
+        icon: {
+            height: 24,
+            width: 24,
+            path: "M7 14H5v5h5v-2H7v-3zm-2-4h2V7h3V5H5v5zm12 7h-3v2h5v-5h-2v3zM14 5v2h3v3h2V5h-5z",
+        },
+        click: function (gd: HTMLElement, evt: Event) {
+            const title = gd.classList.toggle("full-screen") ? "Exit Full screen" : "Full screen";
+            (evt.currentTarget as HTMLElement).setAttribute("data-title", title);
+            const {height} = gd.dataset;
+            if (height) {
+                gd.attributeStyleMap.set("height", height);
+            } else {
+                gd.setAttribute("data-height", getComputedStyle(gd.querySelector(".svg-container") || gd).height)
+            }
+            window.dispatchEvent(new Event('resize'));
+        },
+    },
+];
+
 const Chart = (props: ChartProp) => {
     const {
         title = "",
@@ -293,7 +337,7 @@ const Chart = (props: ChartProp) => {
     useDispatchRequestUpdateOnFirstRender(dispatch, id, module, updateVars);
 
     const layout = useMemo(() => {
-        const layout = {...baseLayout};
+        const layout = { ...baseLayout };
         let template = undefined;
         try {
             const tpl = props.template && JSON.parse(props.template);
@@ -320,18 +364,19 @@ const Chart = (props: ChartProp) => {
         }
         return {
             ...layout,
+            autosize: true,
             title: title || layout.title,
             xaxis: {
                 title:
                     config.traces.length && config.traces[0].length && config.traces[0][0]
-                        ? getColNameFromIndexed(config.columns[config.traces[0][0]].dfid)
+                        ? getColNameFromIndexed(config.columns[config.traces[0][0]]?.dfid)
                         : undefined,
                 ...layout.xaxis,
             },
             yaxis: {
                 title:
                     config.traces.length == 1 && config.traces[0].length > 1 && config.columns[config.traces[0][1]]
-                        ? getColNameFromIndexed(config.columns[config.traces[0][1]].dfid)
+                        ? getColNameFromIndexed(config.columns[config.traces[0][1]]?.dfid)
                         : undefined,
                 ...layout.yaxis,
             },
@@ -376,7 +421,7 @@ const Chart = (props: ChartProp) => {
                           getArrayValue(config.names, idx) ||
                           (config.columns[trace[1]] ? getColNameFromIndexed(config.columns[trace[1]].dfid) : undefined),
                   } as Record<string, unknown>;
-                  ret.marker = getArrayValue(config.markers, idx, ret.marker || {});
+                  ret.marker = {...getArrayValue(config.markers, idx, ret.marker || {})};
                   MARKER_TO_COL.forEach((prop) => {
                       const val = (ret.marker as Record<string, unknown>)[prop];
                       if (typeof val === "string") {
@@ -446,7 +491,7 @@ const Chart = (props: ChartProp) => {
     }, [props.figure, selected, data, config, dataKey]);
 
     const plotConfig = useMemo(() => {
-        let plconf = {};
+        let plconf: Partial<Config> = {};
         if (props.plotConfig) {
             try {
                 plconf = JSON.parse(props.plotConfig);
@@ -458,47 +503,46 @@ const Chart = (props: ChartProp) => {
                 plconf = {};
             }
         }
-        if (active) {
-            return plconf;
-        } else {
-            return { ...plconf, staticPlot: true };
+        plconf.modeBarButtonsToAdd = TaipyPlotlyButtons;
+        plconf.responsive = true;
+        plconf.autosizable = true;
+        if (!active) {
+            plconf.staticPlot = true;
         }
+        return plconf;
     }, [active, props.plotConfig]);
 
     const onRelayout = useCallback(
         (eventData: PlotRelayoutEvent) => {
-            if (Object.keys(eventData).some((k) => k.startsWith("xaxis."))) {
-                onRangeChange &&
-                    dispatch(createSendActionNameAction(id, module, { action: onRangeChange, ...eventData }));
-                if (config.decimators && !config.types.includes("scatter3d")) {
-                    const backCols = Object.values(config.columns).map((col) => col.dfid);
-                    const eventDataKey = Object.entries(eventData)
-                        .map(([k, v]) => `${k}=${v}`)
-                        .join("-");
-                    const dtKey =
-                        backCols.join("-") +
-                        (config.decimators ? `--${config.decimators.join("")}` : "") +
-                        "--" +
-                        eventDataKey;
-                    setDataKey(dtKey);
-                    dispatch(
-                        createRequestChartUpdateAction(
-                            updateVarName,
-                            id,
-                            module,
-                            backCols,
-                            dtKey,
-                            getDecimatorsPayload(
-                                config.decimators,
-                                plotRef.current,
-                                config.modes,
-                                config.columns,
-                                config.traces,
-                                eventData
-                            )
+            onRangeChange && dispatch(createSendActionNameAction(id, module, { action: onRangeChange, ...eventData }));
+            if (config.decimators && !config.types.includes("scatter3d")) {
+                const backCols = Object.values(config.columns).map((col) => col.dfid);
+                const eventDataKey = Object.entries(eventData)
+                    .map(([k, v]) => `${k}=${v}`)
+                    .join("-");
+                const dtKey =
+                    backCols.join("-") +
+                    (config.decimators ? `--${config.decimators.join("")}` : "") +
+                    "--" +
+                    eventDataKey;
+                setDataKey(dtKey);
+                dispatch(
+                    createRequestChartUpdateAction(
+                        updateVarName,
+                        id,
+                        module,
+                        backCols,
+                        dtKey,
+                        getDecimatorsPayload(
+                            config.decimators,
+                            plotRef.current,
+                            config.modes,
+                            config.columns,
+                            config.traces,
+                            eventData
                         )
-                    );
-                }
+                    )
+                );
             }
         },
         [
@@ -558,7 +602,7 @@ const Chart = (props: ChartProp) => {
     );
 
     return render ? (
-        <Box id={id} key="div" data-testid={props.testId} className={className} ref={plotRef}>
+        <Box id={id} data-testid={props.testId} className={className} ref={plotRef}>
             <Tooltip title={hover || ""}>
                 <Suspense fallback={<Skeleton key="skeleton" sx={skelStyle} />}>
                     {Array.isArray(props.figure) && props.figure.length && props.figure[0].data !== undefined ? (
@@ -571,6 +615,7 @@ const Chart = (props: ChartProp) => {
                             onSelected={onSelect}
                             onDeselect={onSelect}
                             config={plotConfig}
+                            useResizeHandler
                         />
                     ) : (
                         <Plot
@@ -583,6 +628,7 @@ const Chart = (props: ChartProp) => {
                             onDeselect={isOnClick(config.types) ? undefined : onSelect}
                             onClick={isOnClick(config.types) ? onSelect : undefined}
                             config={plotConfig}
+                            useResizeHandler
                         />
                     )}
                 </Suspense>
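
Chart.tsx above registers a custom full-screen mode-bar button (TaipyPlotlyButtons) through modeBarButtonsToAdd and turns on responsive/autosizable plot config so the full-screen rule added to chart.css can take effect. A reduced sketch of the same Plotly mechanism outside Taipy, using plain plotly.js; the element id and trace data are assumptions:

import { newPlot, Config, ModeBarButtonAny } from "plotly.js";

const fullScreen: ModeBarButtonAny = {
    name: "Full screen",
    title: "Full screen",
    icon: { height: 24, width: 24, path: "M7 14H5v5h5v-2H7v-3zm-2-4h2V7h3V5H5v5zm12 7h-3v2h5v-5h-2v3zM14 5v2h3v3h2V5h-5z" },
    click: (gd: HTMLElement) => {
        // Toggle the class styled by public/stylekit/controls/chart.css above,
        // then nudge Plotly into re-laying out at the new size.
        gd.classList.toggle("full-screen");
        window.dispatchEvent(new Event("resize"));
    },
};

const config: Partial<Config> = {
    modeBarButtonsToAdd: [fullScreen],
    responsive: true,
    autosizable: true,
};

newPlot("my-chart", [{ x: [1, 2, 3], y: [2, 1, 3], type: "scatter" }], { autosize: true }, config);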

+ 5 - 3
frontend/taipy-gui/src/components/Taipy/FileSelector.tsx

@@ -30,6 +30,7 @@ interface FileSelectorProps extends TaipyActiveProps {
     multiple?: boolean;
     extensions?: string;
     dropMessage?: string;
+    notify?: boolean;
 }
 
 const handleDragOver = (evt: DragEvent) => {
@@ -50,6 +51,7 @@ const FileSelector = (props: FileSelectorProps) => {
         extensions = ".csv,.xlsx",
         dropMessage = "Drop here to Upload",
         label,
+        notify = true,
     } = props;
     const [dropLabel, setDropLabel] = useState("");
     const [dropSx, setDropSx] = useState(defaultSx);
@@ -74,20 +76,20 @@ const FileSelector = (props: FileSelectorProps) => {
                     (value) => {
                         setUpload(false);
                         onAction && dispatch(createSendActionNameAction(id, module, onAction));
-                        dispatch(
+                        notify && dispatch(
                             createAlertAction({ atype: "success", message: value, system: false, duration: 3000 })
                         );
                     },
                     (reason) => {
                         setUpload(false);
-                        dispatch(
+                        notify && dispatch(
                             createAlertAction({ atype: "error", message: reason, system: false, duration: 3000 })
                         );
                     }
                 );
             }
         },
-        [state.id, id, onAction, updateVarName, dispatch, module]
+        [state.id, id, onAction, notify, updateVarName, dispatch, module]
     );
 
     const handleChange = useCallback(
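
FileSelector.tsx above adds a notify prop (default true) that gates the success/error alert dispatched once an upload finishes. A hedged rendering sketch; the import path, prop set, and names below are assumptions for illustration only:

import React from "react";
import FileSelector from "./FileSelector";

export const SilentUploader = () => (
    <FileSelector
        updateVarName="uploaded_path"
        onAction="on_upload"
        label="Drop a CSV here"
        notify={false} // suppress the upload success/error toasts guarded above
    />
);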

+ 8 - 0
frontend/taipy-gui/src/components/Taipy/Navigate.tsx

@@ -34,9 +34,14 @@ const Navigate = ({ to, params, tab, force }: NavigateProps) => {
             const searchParams = new URLSearchParams(params || "");
             // Handle Resource Handler Id
             let tprh: string | null = null;
+            let meta: string | null = null;
             if (searchParams.has("tprh")) {
                 tprh = searchParams.get("tprh");
                 searchParams.delete("tprh");
+                if (searchParams.has("tp_cp_meta")) {
+                    meta = searchParams.get("tp_cp_meta");
+                    searchParams.delete("tp_cp_meta");
+                }
             }
             if (Object.keys(state.locations || {}).some((route) => tos === route)) {
                 const searchParamsLocation = new URLSearchParams(location.search);
@@ -47,6 +52,9 @@ const Navigate = ({ to, params, tab, force }: NavigateProps) => {
                     if (tprh !== null) {
                         // Add a session cookie for the resource handler id
                         document.cookie = `tprh=${tprh};path=/;`;
+                        if (meta !== null) {
+                            localStorage.setItem("tp_cp_meta", meta);
+                        }
                         navigate(0);
                     }
                 }

+ 66 - 0
frontend/taipy-gui/src/components/Taipy/PaginatedTable.spec.tsx

@@ -122,6 +122,33 @@ const editableColumns = JSON.stringify({
     Code: { dfid: "Code", type: "str", index: 3 },
 });
 
+const buttonValue = {
+    "0--1-bool,int,float,Code--asc": {
+        data: [
+            {
+                bool: true,
+                int: 856,
+                float: 1.5,
+                Code: "[Button Label](button action)",
+            },
+            {
+                bool: false,
+                int: 823,
+                float: 2.5,
+                Code: "ZZZ",
+            },
+        ],
+        rowcount: 2,
+        start: 0,
+    },
+};
+const buttonColumns = JSON.stringify({
+    bool: { dfid: "bool", type: "bool", index: 0 },
+    int: { dfid: "int", type: "int", index: 1 },
+    float: { dfid: "float", type: "float", index: 2 },
+    Code: { dfid: "Code", type: "str", index: 3 },
+});
+
 describe("PaginatedTable Component", () => {
     it("renders", async () => {
         const { getByText } = render(<PaginatedTable data={undefined} defaultColumns={tableColumns} />);
@@ -539,6 +566,45 @@ describe("PaginatedTable Component", () => {
                 args: [],
                 col: "int",
                 index: 1,
+                reason: "click",
+                value: undefined
+            },
+            type: "SEND_ACTION_ACTION",
+        });
+    });
+    it("can click on button", async () => {
+        const dispatch = jest.fn();
+        const state: TaipyState = INITIAL_STATE;
+        const { getByText, rerender } = render(
+            <TaipyContext.Provider value={{ state, dispatch }}>
+                <PaginatedTable data={undefined} defaultColumns={editableColumns} showAll={true} onAction="onSelect" />
+            </TaipyContext.Provider>
+        );
+
+        rerender(
+            <TaipyContext.Provider value={{ state: { ...state }, dispatch }}>
+                <PaginatedTable
+                    data={buttonValue as TableValueType}
+                    defaultColumns={buttonColumns}
+                    showAll={true}
+                    onAction="onSelect"
+                />
+            </TaipyContext.Provider>
+        );
+
+        dispatch.mockClear();
+        const elt = getByText("Button Label");
+        expect(elt.tagName).toBe("BUTTON");
+        await userEvent.click(elt);
+        expect(dispatch).toHaveBeenCalledWith({
+            name: "",
+            payload: {
+                action: "onSelect",
+                args: [],
+                col: "Code",
+                index: 0,
+                reason: "button",
+                value: "button action"
             },
             type: "SEND_ACTION_ACTION",
         });
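
The new PaginatedTable test above exercises a cell convention in which a string such as "[Button Label](button action)" is rendered as a button, and clicking it reports reason "button" with the action text as value. A standalone sketch of that parsing rule, mirroring the slice/split done in tableUtils.tsx further down; the helper name is illustrative:

// Parse "[label](action)" cell values the same way tableUtils.tsx does below.
const parseButtonCell = (value: unknown): [label: string, action: string] | undefined => {
    if (typeof value === "string" && value.startsWith("[") && value.endsWith(")")) {
        const parts = value.slice(1, -1).split("](");
        if (parts.length === 2) {
            return parts as [string, string];
        }
    }
    return undefined;
};

console.log(parseButtonCell("[Button Label](button action)")); // ["Button Label", "button action"]
console.log(parseButtonCell("ZZZ")); // undefined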

+ 30 - 3
frontend/taipy-gui/src/components/Taipy/PaginatedTable.tsx

@@ -39,6 +39,7 @@ import IconButton from "@mui/material/IconButton";
 import AddIcon from "@mui/icons-material/Add";
 import DataSaverOn from "@mui/icons-material/DataSaverOn";
 import DataSaverOff from "@mui/icons-material/DataSaverOff";
+import Download from "@mui/icons-material/Download";
 
 import { createRequestTableUpdateAction, createSendActionNameAction } from "../../context/taipyReducers";
 import {
@@ -67,6 +68,7 @@ import {
     getRowIndex,
     getTooltip,
     OnRowClick,
+    DownloadAction,
 } from "./tableUtils";
 import {
     useClassNames,
@@ -102,6 +104,7 @@ const PaginatedTable = (props: TaipyPaginatedTableProps) => {
         width = "100%",
         size = DEFAULT_SIZE,
         userData,
+        downloadable = false,
     } = props;
     const pageSize = props.pageSize === undefined || props.pageSize < 1 ? 100 : Math.round(props.pageSize);
     const [value, setValue] = useState<Record<string, unknown>>({});
@@ -142,7 +145,7 @@ const PaginatedTable = (props: TaipyPaginatedTableProps) => {
                         col.tooltip = props.tooltip;
                     }
                 });
-                addDeleteColumn((active && (onAdd || onDelete) ? 1 : 0) + (active && filter ? 1 : 0), baseColumns);
+                addDeleteColumn((active && (onAdd || onDelete) ? 1 : 0) + (active && filter ? 1 : 0) + (active && downloadable ? 1 : 0), baseColumns);
                 const colsOrder = Object.keys(baseColumns).sort(getsortByIndex(baseColumns));
                 const styTt = colsOrder.reduce<Record<string, Record<string, string>>>((pv, col) => {
                     if (baseColumns[col].style) {
@@ -173,7 +176,7 @@ const PaginatedTable = (props: TaipyPaginatedTableProps) => {
             hNan,
             false,
         ];
-    }, [active, editable, onAdd, onDelete, baseColumns, props.lineStyle, props.tooltip, props.nanValue, props.filter]);
+    }, [active, editable, onAdd, onDelete, baseColumns, props.lineStyle, props.tooltip, props.nanValue, props.filter, downloadable]);
 
     useDispatchRequestUpdateOnFirstRender(dispatch, id, module, updateVars);
 
@@ -311,6 +314,17 @@ const PaginatedTable = (props: TaipyPaginatedTableProps) => {
         [startIndex, dispatch, updateVarName, onAdd, module, userData]
     );
 
+    const onDownload = useCallback(
+        () =>
+            dispatch(
+                createSendActionNameAction(updateVarName, module, {
+                    action: DownloadAction,
+                    user_data: userData,
+                })
+            ),
+        [dispatch, updateVarName, module, userData]
+    );
+
     const tableContainerSx = useMemo(() => ({ maxHeight: height }), [height]);
 
     const pso = useMemo(() => {
@@ -379,12 +393,14 @@ const PaginatedTable = (props: TaipyPaginatedTableProps) => {
     );
 
     const onRowSelection: OnRowSelection = useCallback(
-        (rowIndex: number, colName?: string) =>
+        (rowIndex: number, colName?: string, value?: string) =>
             dispatch(
                 createSendActionNameAction(updateVarName, module, {
                     action: onAction,
                     index: getRowIndex(rows[rowIndex], rowIndex, startIndex),
                     col: colName === undefined ? null : colName,
+                    value,
+                    reason: value === undefined ? "click": "button",
                     user_data: userData,
                 })
             ),
@@ -441,6 +457,17 @@ const PaginatedTable = (props: TaipyPaginatedTableProps) => {
                                                             className={className}
                                                         />
                                                     ) : null,
+                                                    active && downloadable ? (
+                                                        <Tooltip title="Download as CSV" key="downloadCsv">
+                                                            <IconButton
+                                                                onClick={onDownload}
+                                                                size="small"
+                                                                sx={iconInRowSx}
+                                                            >
+                                                                <Download fontSize="inherit" />
+                                                            </IconButton>
+                                                        </Tooltip>
+                                                    ) : null,
                                                 ]
                                             ) : (
                                                 <TableSortLabel

+ 99 - 60
frontend/taipy-gui/src/components/Taipy/tableUtils.tsx

@@ -39,7 +39,7 @@ import { isValid } from "date-fns";
 import { FormatConfig } from "../../context/taipyReducers";
 import { dateToString, getDateTime, getDateTimeString, getNumberString, getTimeZonedDate } from "../../utils/index";
 import { TaipyActiveProps, TaipyMultiSelectProps, getSuffixedClassNames } from "./utils";
-import { FilterOptionsState, TextField } from "@mui/material";
+import { Button, FilterOptionsState, TextField } from "@mui/material";
 
 /**
  * A column description as received by the backend.
@@ -128,8 +128,11 @@ export interface TaipyTableProps extends TaipyActiveProps, TaipyMultiSelectProps
     size?: "small" | "medium";
     defaultKey?: string; // for testing purposes only
     userData?: unknown;
+    downloadable?: boolean;
 }
 
+export const DownloadAction = "__Taipy__download_csv";
+
 export type PageSizeOptionsType = (
     | number
     | {
@@ -152,7 +155,7 @@ export const iconInRowSx = { fontSize: "body2.fontSize" };
 export const iconsWrapperSx = { gridColumnStart: 2, display: "flex", alignItems: "center" } as CSSProperties;
 const cellBoxSx = { display: "grid", gridTemplateColumns: "1fr auto", alignItems: "center" } as CSSProperties;
 const tableFontSx = { fontSize: "body2.fontSize" };
-
+const ButtonSx = { minHeight: "unset", mb: "unset", padding: "unset", lineHeight: "unset" };
 export interface OnCellValidation {
     (value: RowValue, rowIndex: number, colName: string, userValue: string, tz?: string): void;
 }
@@ -162,7 +165,7 @@ export interface OnRowDeletion {
 }
 
 export interface OnRowSelection {
-    (rowIndex: number, colName?: string): void;
+    (rowIndex: number, colName?: string, value?: string): void;
 }
 
 export interface OnRowClick {
@@ -217,13 +220,6 @@ const isBooleanTrue = (val: RowValue) =>
 const defaultCursor = { cursor: "default" };
 const defaultCursorIcon = { ...iconInRowSx, "& .MuiSwitch-input": defaultCursor };
 
-const renderCellValue = (val: RowValue | boolean, col: ColumnDesc, formatConf: FormatConfig, nanValue?: string) => {
-    if (val !== null && val !== undefined && col.type && col.type.startsWith("bool")) {
-        return <Switch checked={val as boolean} size="small" title={val ? "True" : "False"} sx={defaultCursorIcon} />;
-    }
-    return <span style={defaultCursor}>{formatValue(val as RowValue, col, formatConf, nanValue)}</span>;
-};
-
 const getCellProps = (col: ColumnDesc, base: Partial<TableCellProps> = {}): Partial<TableCellProps> => {
     switch (col.type) {
         case "bool":
@@ -269,6 +265,8 @@ const filter = createFilterOptions<string>();
 const getOptionKey = (option: string) => (Array.isArray(option) ? option[0] : option);
 const getOptionLabel = (option: string) => (Array.isArray(option) ? option[1] : option);
 
+const onCompleteClose = (evt: SyntheticEvent) => evt.stopPropagation();
+
 export const EditableCell = (props: EditableCellProps) => {
     const {
         onValidation,
@@ -288,55 +286,75 @@ export const EditableCell = (props: EditableCellProps) => {
     const [deletion, setDeletion] = useState(false);
 
     const onChange = useCallback((e: ChangeEvent<HTMLInputElement>) => setVal(e.target.value), []);
-    const onCompleteChange = useCallback((e: SyntheticEvent, value: string | null) => setVal(value), []);
+    const onCompleteChange = useCallback((e: SyntheticEvent, value: string | null) => {
+        e.stopPropagation();
+        setVal(value);
+    }, []);
     const onBoolChange = useCallback((e: ChangeEvent<HTMLInputElement>) => setVal(e.target.checked), []);
     const onDateChange = useCallback((date: Date | null) => setVal(date), []);
 
     const withTime = useMemo(() => !!colDesc.format && colDesc.format.toLowerCase().includes("h"), [colDesc.format]);
 
-    const onCheckClick = useCallback(() => {
-        let castedVal = val;
-        switch (colDesc.type) {
-            case "bool":
-                castedVal = isBooleanTrue(val as RowValue);
-                break;
-            case "int":
-                try {
-                    castedVal = parseInt(val as string, 10);
-                } catch (e) {
-                    // ignore
-                }
-                break;
-            case "float":
-                try {
-                    castedVal = parseFloat(val as string);
-                } catch (e) {
-                    // ignore
-                }
-                break;
-            case "datetime":
-                if (val === null) {
-                    castedVal = val;
-                } else if (isValid(val)) {
-                    castedVal = dateToString(getTimeZonedDate(val as Date, formatConfig.timeZone, withTime), withTime);
-                } else {
-                    return;
-                }
-                break;
+    const button = useMemo(() => {
+        if (onSelection && typeof value == "string" && value.startsWith("[") && value.endsWith(")")) {
+            const parts = value.slice(1, -1).split("](");
+            if (parts.length == 2) {
+                return parts as [string, string];
+            }
         }
-        onValidation &&
-            onValidation(
-                castedVal as RowValue,
-                rowIndex,
-                colDesc.dfid,
-                val as string,
-                colDesc.type == "datetime" ? formatConfig.timeZone : undefined
-            );
-        setEdit((e) => !e);
-    }, [onValidation, val, rowIndex, colDesc.dfid, colDesc.type, formatConfig.timeZone, withTime]);
+        return undefined;
+    }, [value, onSelection]);
+
+    const onCheckClick = useCallback(
+        (evt?: MouseEvent<HTMLElement>) => {
+            evt && evt.stopPropagation();
+            let castVal = val;
+            switch (colDesc.type) {
+                case "bool":
+                    castVal = isBooleanTrue(val as RowValue);
+                    break;
+                case "int":
+                    try {
+                        castVal = parseInt(val as string, 10);
+                    } catch (e) {
+                        // ignore
+                    }
+                    break;
+                case "float":
+                    try {
+                        castVal = parseFloat(val as string);
+                    } catch (e) {
+                        // ignore
+                    }
+                    break;
+                case "datetime":
+                    if (val === null) {
+                        castVal = val;
+                    } else if (isValid(val)) {
+                        castVal = dateToString(
+                            getTimeZonedDate(val as Date, formatConfig.timeZone, withTime),
+                            withTime
+                        );
+                    } else {
+                        return;
+                    }
+                    break;
+            }
+            onValidation &&
+                onValidation(
+                    castVal as RowValue,
+                    rowIndex,
+                    colDesc.dfid,
+                    val as string,
+                    colDesc.type == "datetime" ? formatConfig.timeZone : undefined
+                );
+            setEdit((e) => !e);
+        },
+        [onValidation, val, rowIndex, colDesc.dfid, colDesc.type, formatConfig.timeZone, withTime]
+    );
 
     const onEditClick = useCallback(
-        (evt?: MouseEvent) => {
+        (evt?: MouseEvent<HTMLElement>) => {
             evt && evt.stopPropagation();
             colDesc.type?.startsWith("date")
                 ? setVal(getDateTime(value as string, formatConfig.timeZone, withTime))
@@ -360,10 +378,14 @@ export const EditableCell = (props: EditableCellProps) => {
         [onCheckClick, onEditClick]
     );
 
-    const onDeleteCheckClick = useCallback(() => {
-        onDeletion && onDeletion(rowIndex);
-        setDeletion((d) => !d);
-    }, [onDeletion, rowIndex]);
+    const onDeleteCheckClick = useCallback(
+        (evt?: MouseEvent<HTMLElement>) => {
+            evt && evt.stopPropagation();
+            onDeletion && onDeletion(rowIndex);
+            setDeletion((d) => !d);
+        },
+        [onDeletion, rowIndex]
+    );
 
     const onDeleteClick = useCallback(
         (evt?: MouseEvent) => {
@@ -388,11 +410,11 @@ export const EditableCell = (props: EditableCellProps) => {
     );
 
     const onSelect = useCallback(
-        (e: MouseEvent<HTMLDivElement>) => {
+        (e: MouseEvent<HTMLElement>) => {
             e.stopPropagation();
-            onSelection && onSelection(rowIndex, colDesc.dfid);
+            onSelection && onSelection(rowIndex, colDesc.dfid, button && button[1]);
         },
-        [onSelection, rowIndex, colDesc.dfid]
+        [onSelection, rowIndex, colDesc.dfid, button]
     );
 
     const filterOptions = useCallback(
@@ -487,6 +509,7 @@ export const EditableCell = (props: EditableCellProps) => {
                             freeSolo={!!colDesc.freeLov}
                             value={val as string}
                             onChange={onCompleteChange}
+                            onOpen={onCompleteClose}
                             renderInput={(params) => (
                                 <TextField
                                     {...params}
@@ -498,6 +521,7 @@ export const EditableCell = (props: EditableCellProps) => {
                                     sx={tableFontSx}
                                 />
                             )}
+                            disableClearable={!colDesc.freeLov}
                         />
                         <Box sx={iconsWrapperSx}>
                             <IconButton onClick={onCheckClick} size="small" sx={iconInRowSx}>
@@ -555,8 +579,23 @@ export const EditableCell = (props: EditableCellProps) => {
                 ) : null
             ) : (
                 <Box sx={cellBoxSx} onClick={onSelect}>
-                    {renderCellValue(value, colDesc, formatConfig, nanValue)}
-                    {onValidation ? (
+                    {button ? (
+                        <Button size="small" onClick={onSelect} sx={ButtonSx}>
+                            {formatValue(button[0] as RowValue, colDesc, formatConfig, nanValue)}
+                        </Button>
+                    ) : val !== null && val !== undefined && colDesc.type && colDesc.type.startsWith("bool") ? (
+                        <Switch
+                            checked={val as boolean}
+                            size="small"
+                            title={val ? "True" : "False"}
+                            sx={defaultCursorIcon}
+                        />
+                    ) : (
+                        <span style={defaultCursor}>
+                            {formatValue(val as RowValue, colDesc, formatConfig, nanValue)}
+                        </span>
+                    )}
+                    {onValidation && !button ? (
                         <Box sx={iconsWrapperSx}>
                             <IconButton onClick={onEditClick} size="small" sx={iconInRowSx}>
                                 <EditIcon fontSize="inherit" />

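The EditableCell changes above give string cell values of the form "[label](payload)" a special meaning: when a selection callback is configured, such a cell renders as an in-cell Button whose payload is forwarded to the row-selection callback. A minimal standalone sketch of that parsing convention — the helper name is illustrative and not part of the component:

// Hypothetical helper mirroring the check added to EditableCell above:
// a "[label](payload)" string becomes a [label, payload] pair,
// anything else is rendered as a normal cell value.
const parseButtonValue = (value: unknown): [string, string] | undefined => {
    if (typeof value == "string" && value.startsWith("[") && value.endsWith(")")) {
        const parts = value.slice(1, -1).split("](");
        if (parts.length == 2) {
            return parts as [string, string];
        }
    }
    return undefined;
};

// parseButtonValue("[Open](scenario_1)") -> ["Open", "scenario_1"]  (rendered as a Button, "scenario_1" sent to onSelection)
// parseButtonValue("plain text")         -> undefined               (rendered as plain text)
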
+ 168 - 135
frontend/taipy/package-lock.json

@@ -1,12 +1,12 @@
 {
   "name": "taipy-gui-core",
-  "version": "3.1.0",
+  "version": "3.2.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "taipy-gui-core",
-      "version": "3.1.0",
+      "version": "3.2.0",
       "hasInstallScript": true,
       "dependencies": {
         "@emotion/react": "^11.10.6",
@@ -41,7 +41,7 @@
       }
     },
     "../../taipy/gui/webapp": {
-      "version": "3.1.0"
+      "version": "3.2.0"
     },
     "node_modules/@aashutoshrathi/word-wrap": {
       "version": "1.2.6",
@@ -595,14 +595,14 @@
       }
     },
     "node_modules/@jridgewell/gen-mapping": {
-      "version": "0.3.4",
-      "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.4.tgz",
-      "integrity": "sha512-Oud2QPM5dHviZNn4y/WhhYKSXksv+1xLEIsNrAbGcFzUN3ubqWRFT5gwPchNc5NuzILOU4tPBDTZ4VwhL8Y7cw==",
+      "version": "0.3.5",
+      "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz",
+      "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==",
       "dev": true,
       "dependencies": {
-        "@jridgewell/set-array": "^1.0.1",
+        "@jridgewell/set-array": "^1.2.1",
         "@jridgewell/sourcemap-codec": "^1.4.10",
-        "@jridgewell/trace-mapping": "^0.3.9"
+        "@jridgewell/trace-mapping": "^0.3.24"
       },
       "engines": {
         "node": ">=6.0.0"
@@ -618,9 +618,9 @@
       }
     },
     "node_modules/@jridgewell/set-array": {
-      "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz",
-      "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==",
+      "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz",
+      "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==",
       "dev": true,
       "engines": {
         "node": ">=6.0.0"
@@ -643,9 +643,9 @@
       "dev": true
     },
     "node_modules/@jridgewell/trace-mapping": {
-      "version": "0.3.23",
-      "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.23.tgz",
-      "integrity": "sha512-9/4foRoUKp8s96tSkh8DlAAc5A0Ty8vLXld+l9gjKKY6ckwI8G15f0hskGmuLZu78ZlGa1vtsfOa+lnB4vG6Jg==",
+      "version": "0.3.25",
+      "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz",
+      "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==",
       "dev": true,
       "dependencies": {
         "@jridgewell/resolve-uri": "^3.1.0",
@@ -684,14 +684,14 @@
       }
     },
     "node_modules/@mui/base": {
-      "version": "5.0.0-beta.37",
-      "resolved": "https://registry.npmjs.org/@mui/base/-/base-5.0.0-beta.37.tgz",
-      "integrity": "sha512-/o3anbb+DeCng8jNsd3704XtmmLDZju1Fo8R2o7ugrVtPQ/QpcqddwKNzKPZwa0J5T8YNW3ZVuHyQgbTnQLisQ==",
+      "version": "5.0.0-beta.38",
+      "resolved": "https://registry.npmjs.org/@mui/base/-/base-5.0.0-beta.38.tgz",
+      "integrity": "sha512-AsjD6Y1X5A1qndxz8xCcR8LDqv31aiwlgWMPxFAX/kCKiIGKlK65yMeVZ62iQr/6LBz+9hSKLiD1i4TZdAHKcQ==",
       "dependencies": {
         "@babel/runtime": "^7.23.9",
         "@floating-ui/react-dom": "^2.0.8",
         "@mui/types": "^7.2.13",
-        "@mui/utils": "^5.15.11",
+        "@mui/utils": "^5.15.12",
         "@popperjs/core": "^2.11.8",
         "clsx": "^2.1.0",
         "prop-types": "^15.8.1"
@@ -715,18 +715,18 @@
       }
     },
     "node_modules/@mui/core-downloads-tracker": {
-      "version": "5.15.11",
-      "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-5.15.11.tgz",
-      "integrity": "sha512-JVrJ9Jo4gyU707ujnRzmE8ABBWpXd6FwL9GYULmwZRtfPg89ggXs/S3MStQkpJ1JRWfdLL6S5syXmgQGq5EDAw==",
+      "version": "5.15.12",
+      "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-5.15.12.tgz",
+      "integrity": "sha512-brRO+tMFLpGyjEYHrX97bzqeF6jZmKpqqe1rY0LyIHAwP6xRVzh++zSecOQorDOCaZJg4XkGT9xfD+RWOWxZBA==",
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/mui-org"
       }
     },
     "node_modules/@mui/icons-material": {
-      "version": "5.15.11",
-      "resolved": "https://registry.npmjs.org/@mui/icons-material/-/icons-material-5.15.11.tgz",
-      "integrity": "sha512-R5ZoQqnKpd+5Ew7mBygTFLxgYsQHPhgR3TDXSgIHYIjGzYuyPLmGLSdcPUoMdi6kxiYqHlpPj4NJxlbaFD0UHA==",
+      "version": "5.15.12",
+      "resolved": "https://registry.npmjs.org/@mui/icons-material/-/icons-material-5.15.12.tgz",
+      "integrity": "sha512-3BXiDlOd3AexZoEXa/VqpIpVIvosCzjLHsdMWzKMXbZdnBiJjmb9ECdqfjn5SpTClO49qvkKLhkTqdBH3fSFGw==",
       "dependencies": {
         "@babel/runtime": "^7.23.9"
       },
@@ -749,16 +749,16 @@
       }
     },
     "node_modules/@mui/material": {
-      "version": "5.15.11",
-      "resolved": "https://registry.npmjs.org/@mui/material/-/material-5.15.11.tgz",
-      "integrity": "sha512-FA3eEuEZaDaxgN3CgfXezMWbCZ4VCeU/sv0F0/PK5n42qIgsPVD6q+j71qS7/62sp6wRFMHtDMpXRlN+tT/7NA==",
+      "version": "5.15.12",
+      "resolved": "https://registry.npmjs.org/@mui/material/-/material-5.15.12.tgz",
+      "integrity": "sha512-vXJGg6KNKucsvbW6l7w9zafnpOp0CWc0Wx4mDykuABTpQ5QQBnZxP7+oB4yAS1hDZQ1WobbeIl0CjxK4EEahkA==",
       "dependencies": {
         "@babel/runtime": "^7.23.9",
-        "@mui/base": "5.0.0-beta.37",
-        "@mui/core-downloads-tracker": "^5.15.11",
-        "@mui/system": "^5.15.11",
+        "@mui/base": "5.0.0-beta.38",
+        "@mui/core-downloads-tracker": "^5.15.12",
+        "@mui/system": "^5.15.12",
         "@mui/types": "^7.2.13",
-        "@mui/utils": "^5.15.11",
+        "@mui/utils": "^5.15.12",
         "@types/react-transition-group": "^4.4.10",
         "clsx": "^2.1.0",
         "csstype": "^3.1.3",
@@ -793,12 +793,12 @@
       }
     },
     "node_modules/@mui/private-theming": {
-      "version": "5.15.11",
-      "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-5.15.11.tgz",
-      "integrity": "sha512-jY/696SnSxSzO1u86Thym7ky5T9CgfidU3NFJjguldqK4f3Z5S97amZ6nffg8gTD0HBjY9scB+4ekqDEUmxZOA==",
+      "version": "5.15.12",
+      "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-5.15.12.tgz",
+      "integrity": "sha512-cqoSo9sgA5HE+8vZClbLrq9EkyOnYysooepi5eKaKvJ41lReT2c5wOZAeDDM1+xknrMDos+0mT2zr3sZmUiRRA==",
       "dependencies": {
         "@babel/runtime": "^7.23.9",
-        "@mui/utils": "^5.15.11",
+        "@mui/utils": "^5.15.12",
         "prop-types": "^15.8.1"
       },
       "engines": {
@@ -850,15 +850,15 @@
       }
     },
     "node_modules/@mui/system": {
-      "version": "5.15.11",
-      "resolved": "https://registry.npmjs.org/@mui/system/-/system-5.15.11.tgz",
-      "integrity": "sha512-9j35suLFq+MgJo5ktVSHPbkjDLRMBCV17NMBdEQurh6oWyGnLM4uhU4QGZZQ75o0vuhjJghOCA1jkO3+79wKsA==",
+      "version": "5.15.12",
+      "resolved": "https://registry.npmjs.org/@mui/system/-/system-5.15.12.tgz",
+      "integrity": "sha512-/pq+GO6yN3X7r3hAwFTrzkAh7K1bTF5r8IzS79B9eyKJg7v6B/t4/zZYMR6OT9qEPtwf6rYN2Utg1e6Z7F1OgQ==",
       "dependencies": {
         "@babel/runtime": "^7.23.9",
-        "@mui/private-theming": "^5.15.11",
+        "@mui/private-theming": "^5.15.12",
         "@mui/styled-engine": "^5.15.11",
         "@mui/types": "^7.2.13",
-        "@mui/utils": "^5.15.11",
+        "@mui/utils": "^5.15.12",
         "clsx": "^2.1.0",
         "csstype": "^3.1.3",
         "prop-types": "^15.8.1"
@@ -902,9 +902,9 @@
       }
     },
     "node_modules/@mui/utils": {
-      "version": "5.15.11",
-      "resolved": "https://registry.npmjs.org/@mui/utils/-/utils-5.15.11.tgz",
-      "integrity": "sha512-D6bwqprUa9Stf8ft0dcMqWyWDKEo7D+6pB1k8WajbqlYIRA8J8Kw9Ra7PSZKKePGBGWO+/xxrX1U8HpG/aXQCw==",
+      "version": "5.15.12",
+      "resolved": "https://registry.npmjs.org/@mui/utils/-/utils-5.15.12.tgz",
+      "integrity": "sha512-8SDGCnO2DY9Yy+5bGzu00NZowSDtuyHP4H8gunhHGQoIlhlY2Z3w64wBzAOLpYw/ZhJNzksDTnS/i8qdJvxuow==",
       "dependencies": {
         "@babel/runtime": "^7.23.9",
         "@types/prop-types": "^15.7.11",
@@ -929,9 +929,9 @@
       }
     },
     "node_modules/@mui/x-date-pickers": {
-      "version": "6.19.5",
-      "resolved": "https://registry.npmjs.org/@mui/x-date-pickers/-/x-date-pickers-6.19.5.tgz",
-      "integrity": "sha512-WPi59ImgvGTEuAlJiCLnqNkEkbqPjgwUTrg8iqACFIb4qzg5tz4y8vNgmOKMnTXCwWDHjd+SoDdxiMlZJWT1hg==",
+      "version": "6.19.6",
+      "resolved": "https://registry.npmjs.org/@mui/x-date-pickers/-/x-date-pickers-6.19.6.tgz",
+      "integrity": "sha512-QW9AFcPi0vLpkUhmquhhyhLaBvB0AZJuu3NTrE173qNKx3Z3n51aCLY9bc7c6i4ltZMMsVRHlvzQjsve04TC8A==",
       "dependencies": {
         "@babel/runtime": "^7.23.2",
         "@mui/base": "^5.0.0-beta.22",
@@ -1147,9 +1147,9 @@
       "dev": true
     },
     "node_modules/@types/eslint": {
-      "version": "8.56.4",
-      "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.4.tgz",
-      "integrity": "sha512-lG1GLUnL5vuRBGb3MgWUWLdGMH2Hps+pERuyQXCfWozuGKdnhf9Pbg4pkcrVUHjKrU7Rl+GCZ/299ObBXZFAxg==",
+      "version": "8.56.5",
+      "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.5.tgz",
+      "integrity": "sha512-u5/YPJHo1tvkSF2CE0USEkxon82Z5DBy2xR+qfyYNszpX9qcs4sT6uq2kBbj4BXY1+DBGDPnrhMZV3pKWGNukw==",
       "dev": true,
       "dependencies": {
         "@types/estree": "*",
@@ -1212,9 +1212,9 @@
       "dev": true
     },
     "node_modules/@types/node": {
-      "version": "20.11.21",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.21.tgz",
-      "integrity": "sha512-/ySDLGscFPNasfqStUuWWPfL78jompfIoVzLJPVVAHBh6rpG68+pI2Gk+fNLeI8/f1yPYL4s46EleVIc20F1Ow==",
+      "version": "20.11.24",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.24.tgz",
+      "integrity": "sha512-Kza43ewS3xoLgCEpQrsT+xRo/EJej1y0kVYGiLFE1NEODXGzTfwiC6tXTLMQskn1X4/Rjlh0MQUvx9W+L9long==",
       "dev": true,
       "dependencies": {
         "undici-types": "~5.26.4"
@@ -1231,9 +1231,9 @@
       "integrity": "sha512-ga8y9v9uyeiLdpKddhxYQkxNDrfvuPrlFb0N1qnZZByvcElJaXthF1UhvCh9TLWJBEHeNtdnbysW7Y6Uq8CVng=="
     },
     "node_modules/@types/react": {
-      "version": "18.2.60",
-      "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.60.tgz",
-      "integrity": "sha512-dfiPj9+k20jJrLGOu9Nf6eqxm2EyJRrq2NvwOFsfbb7sFExZ9WELPs67UImHj3Ayxg8ruTtKtNnbjaF8olPq0A==",
+      "version": "18.2.63",
+      "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.63.tgz",
+      "integrity": "sha512-ppaqODhs15PYL2nGUOaOu2RSCCB4Difu4UFrP4I3NHLloXC/ESQzQMi9nvjfT1+rudd0d2L3fQPJxRSey+rGlQ==",
       "dependencies": {
         "@types/prop-types": "*",
         "@types/scheduler": "*",
@@ -1275,16 +1275,16 @@
       "dev": true
     },
     "node_modules/@typescript-eslint/eslint-plugin": {
-      "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.1.0.tgz",
-      "integrity": "sha512-j6vT/kCulhG5wBmGtstKeiVr1rdXE4nk+DT1k6trYkwlrvW9eOF5ZbgKnd/YR6PcM4uTEXa0h6Fcvf6X7Dxl0w==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.1.1.tgz",
+      "integrity": "sha512-zioDz623d0RHNhvx0eesUmGfIjzrk18nSBC8xewepKXbBvN/7c1qImV7Hg8TI1URTxKax7/zxfxj3Uph8Chcuw==",
       "dev": true,
       "dependencies": {
         "@eslint-community/regexpp": "^4.5.1",
-        "@typescript-eslint/scope-manager": "7.1.0",
-        "@typescript-eslint/type-utils": "7.1.0",
-        "@typescript-eslint/utils": "7.1.0",
-        "@typescript-eslint/visitor-keys": "7.1.0",
+        "@typescript-eslint/scope-manager": "7.1.1",
+        "@typescript-eslint/type-utils": "7.1.1",
+        "@typescript-eslint/utils": "7.1.1",
+        "@typescript-eslint/visitor-keys": "7.1.1",
         "debug": "^4.3.4",
         "graphemer": "^1.4.0",
         "ignore": "^5.2.4",
@@ -1310,15 +1310,15 @@
       }
     },
     "node_modules/@typescript-eslint/parser": {
-      "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.1.0.tgz",
-      "integrity": "sha512-V1EknKUubZ1gWFjiOZhDSNToOjs63/9O0puCgGS8aDOgpZY326fzFu15QAUjwaXzRZjf/qdsdBrckYdv9YxB8w==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.1.1.tgz",
+      "integrity": "sha512-ZWUFyL0z04R1nAEgr9e79YtV5LbafdOtN7yapNbn1ansMyaegl2D4bL7vHoJ4HPSc4CaLwuCVas8CVuneKzplQ==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/scope-manager": "7.1.0",
-        "@typescript-eslint/types": "7.1.0",
-        "@typescript-eslint/typescript-estree": "7.1.0",
-        "@typescript-eslint/visitor-keys": "7.1.0",
+        "@typescript-eslint/scope-manager": "7.1.1",
+        "@typescript-eslint/types": "7.1.1",
+        "@typescript-eslint/typescript-estree": "7.1.1",
+        "@typescript-eslint/visitor-keys": "7.1.1",
         "debug": "^4.3.4"
       },
       "engines": {
@@ -1338,13 +1338,13 @@
       }
     },
     "node_modules/@typescript-eslint/scope-manager": {
-      "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.1.0.tgz",
-      "integrity": "sha512-6TmN4OJiohHfoOdGZ3huuLhpiUgOGTpgXNUPJgeZOZR3DnIpdSgtt83RS35OYNNXxM4TScVlpVKC9jyQSETR1A==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.1.1.tgz",
+      "integrity": "sha512-cirZpA8bJMRb4WZ+rO6+mnOJrGFDd38WoXCEI57+CYBqta8Yc8aJym2i7vyqLL1vVYljgw0X27axkUXz32T8TA==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.1.0",
-        "@typescript-eslint/visitor-keys": "7.1.0"
+        "@typescript-eslint/types": "7.1.1",
+        "@typescript-eslint/visitor-keys": "7.1.1"
       },
       "engines": {
         "node": "^16.0.0 || >=18.0.0"
@@ -1355,13 +1355,13 @@
       }
     },
     "node_modules/@typescript-eslint/type-utils": {
-      "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.1.0.tgz",
-      "integrity": "sha512-UZIhv8G+5b5skkcuhgvxYWHjk7FW7/JP5lPASMEUoliAPwIH/rxoUSQPia2cuOj9AmDZmwUl1usKm85t5VUMew==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.1.1.tgz",
+      "integrity": "sha512-5r4RKze6XHEEhlZnJtR3GYeCh1IueUHdbrukV2KSlLXaTjuSfeVF8mZUVPLovidCuZfbVjfhi4c0DNSa/Rdg5g==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/typescript-estree": "7.1.0",
-        "@typescript-eslint/utils": "7.1.0",
+        "@typescript-eslint/typescript-estree": "7.1.1",
+        "@typescript-eslint/utils": "7.1.1",
         "debug": "^4.3.4",
         "ts-api-utils": "^1.0.1"
       },
@@ -1382,9 +1382,9 @@
       }
     },
     "node_modules/@typescript-eslint/types": {
-      "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.1.0.tgz",
-      "integrity": "sha512-qTWjWieJ1tRJkxgZYXx6WUYtWlBc48YRxgY2JN1aGeVpkhmnopq+SUC8UEVGNXIvWH7XyuTjwALfG6bFEgCkQA==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.1.1.tgz",
+      "integrity": "sha512-KhewzrlRMrgeKm1U9bh2z5aoL4s7K3tK5DwHDn8MHv0yQfWFz/0ZR6trrIHHa5CsF83j/GgHqzdbzCXJ3crx0Q==",
       "dev": true,
       "engines": {
         "node": "^16.0.0 || >=18.0.0"
@@ -1395,13 +1395,13 @@
       }
     },
     "node_modules/@typescript-eslint/typescript-estree": {
-      "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.1.0.tgz",
-      "integrity": "sha512-k7MyrbD6E463CBbSpcOnwa8oXRdHzH1WiVzOipK3L5KSML92ZKgUBrTlehdi7PEIMT8k0bQixHUGXggPAlKnOQ==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.1.1.tgz",
+      "integrity": "sha512-9ZOncVSfr+sMXVxxca2OJOPagRwT0u/UHikM2Rd6L/aB+kL/QAuTnsv6MeXtjzCJYb8PzrXarypSGIPx3Jemxw==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.1.0",
-        "@typescript-eslint/visitor-keys": "7.1.0",
+        "@typescript-eslint/types": "7.1.1",
+        "@typescript-eslint/visitor-keys": "7.1.1",
         "debug": "^4.3.4",
         "globby": "^11.1.0",
         "is-glob": "^4.0.3",
@@ -1423,17 +1423,17 @@
       }
     },
     "node_modules/@typescript-eslint/utils": {
-      "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.1.0.tgz",
-      "integrity": "sha512-WUFba6PZC5OCGEmbweGpnNJytJiLG7ZvDBJJoUcX4qZYf1mGZ97mO2Mps6O2efxJcJdRNpqweCistDbZMwIVHw==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.1.1.tgz",
+      "integrity": "sha512-thOXM89xA03xAE0lW7alstvnyoBUbBX38YtY+zAUcpRPcq9EIhXPuJ0YTv948MbzmKh6e1AUszn5cBFK49Umqg==",
       "dev": true,
       "dependencies": {
         "@eslint-community/eslint-utils": "^4.4.0",
         "@types/json-schema": "^7.0.12",
         "@types/semver": "^7.5.0",
-        "@typescript-eslint/scope-manager": "7.1.0",
-        "@typescript-eslint/types": "7.1.0",
-        "@typescript-eslint/typescript-estree": "7.1.0",
+        "@typescript-eslint/scope-manager": "7.1.1",
+        "@typescript-eslint/types": "7.1.1",
+        "@typescript-eslint/typescript-estree": "7.1.1",
         "semver": "^7.5.4"
       },
       "engines": {
@@ -1448,12 +1448,12 @@
       }
     },
     "node_modules/@typescript-eslint/visitor-keys": {
-      "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.1.0.tgz",
-      "integrity": "sha512-FhUqNWluiGNzlvnDZiXad4mZRhtghdoKW6e98GoEOYSu5cND+E39rG5KwJMUzeENwm1ztYBRqof8wMLP+wNPIA==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.1.1.tgz",
+      "integrity": "sha512-yTdHDQxY7cSoCcAtiBzVzxleJhkGB9NncSIyMYe2+OGON1ZsP9zOPws/Pqgopa65jvknOjlk/w7ulPlZ78PiLQ==",
       "dev": true,
       "dependencies": {
-        "@typescript-eslint/types": "7.1.0",
+        "@typescript-eslint/types": "7.1.1",
         "eslint-visitor-keys": "^3.4.1"
       },
       "engines": {
@@ -1840,6 +1840,25 @@
         "node": ">=8"
       }
     },
+    "node_modules/array.prototype.findlast": {
+      "version": "1.2.4",
+      "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.4.tgz",
+      "integrity": "sha512-BMtLxpV+8BD+6ZPFIWmnUBpQoy+A+ujcg4rhp2iwCRJYA7PEh2MS4NL3lz8EiDlLrJPp2hg9qWihr5pd//jcGw==",
+      "dev": true,
+      "dependencies": {
+        "call-bind": "^1.0.5",
+        "define-properties": "^1.2.1",
+        "es-abstract": "^1.22.3",
+        "es-errors": "^1.3.0",
+        "es-shim-unscopables": "^1.0.2"
+      },
+      "engines": {
+        "node": ">= 0.4"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/ljharb"
+      }
+    },
     "node_modules/array.prototype.flat": {
       "version": "1.3.2",
       "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz",
@@ -1876,6 +1895,18 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/array.prototype.toreversed": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/array.prototype.toreversed/-/array.prototype.toreversed-1.1.2.tgz",
+      "integrity": "sha512-wwDCoT4Ck4Cz7sLtgUmzR5UV3YF5mFHUlbChCzZBQZ+0m2cl/DH3tKgvphv1nKgFsJ48oCSg6p91q2Vm0I/ZMA==",
+      "dev": true,
+      "dependencies": {
+        "call-bind": "^1.0.2",
+        "define-properties": "^1.2.0",
+        "es-abstract": "^1.22.1",
+        "es-shim-unscopables": "^1.0.0"
+      }
+    },
     "node_modules/array.prototype.tosorted": {
       "version": "1.1.3",
       "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.3.tgz",
@@ -2042,9 +2073,9 @@
       }
     },
     "node_modules/caniuse-lite": {
-      "version": "1.0.30001591",
-      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001591.tgz",
-      "integrity": "sha512-PCzRMei/vXjJyL5mJtzNiUCKP59dm8Apqc3PH8gJkMnMXZGox93RbE76jHsmLwmIo6/3nsYIpJtx0O7u5PqFuQ==",
+      "version": "1.0.30001594",
+      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001594.tgz",
+      "integrity": "sha512-VblSX6nYqyJVs8DKFMldE2IVCJjZ225LW00ydtUWwh5hk9IfkTOffO6r8gJNsH0qqqeAF8KrbMYA2VEwTlGW5g==",
       "dev": true,
       "funding": [
         {
@@ -2339,9 +2370,9 @@
       }
     },
     "node_modules/electron-to-chromium": {
-      "version": "1.4.685",
-      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.685.tgz",
-      "integrity": "sha512-yDYeobbTEe4TNooEzOQO6xFqg9XnAkVy2Lod1C1B2it8u47JNLYvl9nLDWBamqUakWB8Jc1hhS1uHUNYTNQdfw==",
+      "version": "1.4.693",
+      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.693.tgz",
+      "integrity": "sha512-/if4Ueg0GUQlhCrW2ZlXwDAm40ipuKo+OgeHInlL8sbjt+hzISxZK949fZeJaVsheamrzANXvw1zQTvbxTvSHw==",
       "dev": true
     },
     "node_modules/enhanced-resolve": {
@@ -2378,18 +2409,18 @@
       }
     },
     "node_modules/es-abstract": {
-      "version": "1.22.4",
-      "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.4.tgz",
-      "integrity": "sha512-vZYJlk2u6qHYxBOTjAeg7qUxHdNfih64Uu2J8QqWgXZ2cri0ZpJAkzDUK/q593+mvKwlxyaxr6F1Q+3LKoQRgg==",
+      "version": "1.22.5",
+      "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.5.tgz",
+      "integrity": "sha512-oW69R+4q2wG+Hc3KZePPZxOiisRIqfKBVo/HLx94QcJeWGU/8sZhCvc829rd1kS366vlJbzBfXf9yWwf0+Ko7w==",
       "dev": true,
       "dependencies": {
         "array-buffer-byte-length": "^1.0.1",
         "arraybuffer.prototype.slice": "^1.0.3",
-        "available-typed-arrays": "^1.0.6",
+        "available-typed-arrays": "^1.0.7",
         "call-bind": "^1.0.7",
         "es-define-property": "^1.0.0",
         "es-errors": "^1.3.0",
-        "es-set-tostringtag": "^2.0.2",
+        "es-set-tostringtag": "^2.0.3",
         "es-to-primitive": "^1.2.1",
         "function.prototype.name": "^1.1.6",
         "get-intrinsic": "^1.2.4",
@@ -2397,15 +2428,15 @@
         "globalthis": "^1.0.3",
         "gopd": "^1.0.1",
         "has-property-descriptors": "^1.0.2",
-        "has-proto": "^1.0.1",
+        "has-proto": "^1.0.3",
         "has-symbols": "^1.0.3",
         "hasown": "^2.0.1",
         "internal-slot": "^1.0.7",
         "is-array-buffer": "^3.0.4",
         "is-callable": "^1.2.7",
-        "is-negative-zero": "^2.0.2",
+        "is-negative-zero": "^2.0.3",
         "is-regex": "^1.1.4",
-        "is-shared-array-buffer": "^1.0.2",
+        "is-shared-array-buffer": "^1.0.3",
         "is-string": "^1.0.7",
         "is-typed-array": "^1.1.13",
         "is-weakref": "^1.0.2",
@@ -2418,10 +2449,10 @@
         "string.prototype.trim": "^1.2.8",
         "string.prototype.trimend": "^1.0.7",
         "string.prototype.trimstart": "^1.0.7",
-        "typed-array-buffer": "^1.0.1",
-        "typed-array-byte-length": "^1.0.0",
-        "typed-array-byte-offset": "^1.0.0",
-        "typed-array-length": "^1.0.4",
+        "typed-array-buffer": "^1.0.2",
+        "typed-array-byte-length": "^1.0.1",
+        "typed-array-byte-offset": "^1.0.2",
+        "typed-array-length": "^1.0.5",
         "unbox-primitive": "^1.0.2",
         "which-typed-array": "^1.1.14"
       },
@@ -2601,27 +2632,29 @@
       }
     },
     "node_modules/eslint-plugin-react": {
-      "version": "7.33.2",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.33.2.tgz",
-      "integrity": "sha512-73QQMKALArI8/7xGLNI/3LylrEYrlKZSb5C9+q3OtOewTnMQi5cT+aE9E41sLCmli3I9PGGmD1yiZydyo4FEPw==",
+      "version": "7.34.0",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.34.0.tgz",
+      "integrity": "sha512-MeVXdReleBTdkz/bvcQMSnCXGi+c9kvy51IpinjnJgutl3YTHWsDdke7Z1ufZpGfDG8xduBDKyjtB9JH1eBKIQ==",
       "dev": true,
       "dependencies": {
-        "array-includes": "^3.1.6",
-        "array.prototype.flatmap": "^1.3.1",
-        "array.prototype.tosorted": "^1.1.1",
+        "array-includes": "^3.1.7",
+        "array.prototype.findlast": "^1.2.4",
+        "array.prototype.flatmap": "^1.3.2",
+        "array.prototype.toreversed": "^1.1.2",
+        "array.prototype.tosorted": "^1.1.3",
         "doctrine": "^2.1.0",
-        "es-iterator-helpers": "^1.0.12",
+        "es-iterator-helpers": "^1.0.17",
         "estraverse": "^5.3.0",
         "jsx-ast-utils": "^2.4.1 || ^3.0.0",
         "minimatch": "^3.1.2",
-        "object.entries": "^1.1.6",
-        "object.fromentries": "^2.0.6",
-        "object.hasown": "^1.1.2",
-        "object.values": "^1.1.6",
+        "object.entries": "^1.1.7",
+        "object.fromentries": "^2.0.7",
+        "object.hasown": "^1.1.3",
+        "object.values": "^1.1.7",
         "prop-types": "^15.8.1",
-        "resolve": "^2.0.0-next.4",
+        "resolve": "^2.0.0-next.5",
         "semver": "^6.3.1",
-        "string.prototype.matchall": "^4.0.8"
+        "string.prototype.matchall": "^4.0.10"
       },
       "engines": {
         "node": ">=4"
@@ -4929,12 +4962,12 @@
       }
     },
     "node_modules/side-channel": {
-      "version": "1.0.5",
-      "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.5.tgz",
-      "integrity": "sha512-QcgiIWV4WV7qWExbN5llt6frQB/lBven9pqliLXfGPB+K9ZYXxDozp0wLkHS24kWCm+6YXH/f0HhnObZnZOBnQ==",
+      "version": "1.0.6",
+      "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
+      "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
       "dev": true,
       "dependencies": {
-        "call-bind": "^1.0.6",
+        "call-bind": "^1.0.7",
         "es-errors": "^1.3.0",
         "get-intrinsic": "^1.2.4",
         "object-inspect": "^1.13.1"
@@ -5113,9 +5146,9 @@
       }
     },
     "node_modules/terser": {
-      "version": "5.28.1",
-      "resolved": "https://registry.npmjs.org/terser/-/terser-5.28.1.tgz",
-      "integrity": "sha512-wM+bZp54v/E9eRRGXb5ZFDvinrJIOaTapx3WUokyVGZu5ucVCK55zEgGd5Dl2fSr3jUo5sDiERErUWLY6QPFyA==",
+      "version": "5.29.1",
+      "resolved": "https://registry.npmjs.org/terser/-/terser-5.29.1.tgz",
+      "integrity": "sha512-lZQ/fyaIGxsbGxApKmoPTODIzELy3++mXhS5hOqaAWZjQtpq/hFHAc+rm29NND1rYRxRWKcjuARNwULNXa5RtQ==",
       "dev": true,
       "dependencies": {
         "@jridgewell/source-map": "^0.3.3",

+ 1 - 1
frontend/taipy/package.json

@@ -1,6 +1,6 @@
 {
   "name": "taipy-gui-core",
-  "version": "3.1.0",
+  "version": "3.2.0",
   "private": true,
   "devDependencies": {
     "@types/react": "^18.0.15",

+ 3 - 6
frontend/taipy/src/DataNodeViewer.tsx

@@ -303,11 +303,8 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
                 }
                 return false;
             });
-            setDataRequested((req) => {
-                if (!isNewDn) {
-                    return req;
-                }
-                if (req && tabValue == TabValues.Data) {
+            setDataRequested(() => {
+                if (tabValue == TabValues.Data) {
                     dispatch(
                         createSendActionNameAction(id, module, props.onIdSelect, {
                             data_id: newDnId,
@@ -420,7 +417,7 @@ const DataNodeViewer = (props: DataNodeViewerProps) => {
 
     // Datanode data
     const dtType = props.data && props.data[DatanodeDataProps.type];
-    const dtValue = (props.data && props.data[DatanodeDataProps.value]) ?? ((dtType == "float") ? null : undefined);
+    const dtValue = (props.data && props.data[DatanodeDataProps.value]) ?? (dtType == "float" ? null : undefined);
     const dtTabular = (props.data && props.data[DatanodeDataProps.tabular]) ?? false;
     const dtError = props.data && props.data[DatanodeDataProps.error];
     const [dataValue, setDataValue] = useState<RowValue | Date>();

+ 8 - 3
frontend/taipy/src/ScenarioDag.tsx

@@ -14,6 +14,7 @@ import { DisplayModel, TaskStatuses } from "./utils/types";
 import { addStatusToDisplayModel, createDagreEngine, initDiagram, populateModel, relayoutDiagram } from "./utils/diagram";
 import {
     createRequestUpdateAction,
+    createSendActionNameAction,
     createSendUpdateAction,
     getUpdateVar,
     useDispatch,
@@ -38,6 +39,8 @@ interface ScenarioDagProps {
     libClassName?: string;
     className?: string;
     dynamicClassName?: string;
+    onAction?: string;
+    onSelect?: string;
 }
 
 const titleSx = { ml: 2, flex: 1 };
@@ -67,7 +70,7 @@ const getValidScenario = (scenar: DisplayModel | DisplayModel[]) =>
         : undefined;
 
 const ScenarioDag = (props: ScenarioDagProps) => {
-    const { showToolbar = true } = props;
+    const { showToolbar = true, onSelect, onAction } = props;
     const [scenarioId, setScenarioId] = useState("");
     const [engine] = useState(createEngine);
     const [dagreEngine] = useState(createDagreEngine);
@@ -121,8 +124,10 @@ const ScenarioDag = (props: ScenarioDagProps) => {
 
     const zoomToFit = useCallback(() => engine.zoomToFit(), [engine]);
 
+    const onClick = useCallback((id: string) => onAction && dispatch(createSendActionNameAction(props.id, module, onSelect, id, onAction)), [props.id, onAction, onSelect, module, dispatch]);
+
     useEffect(() => {
-        const model = new TaipyDiagramModel();
+        const model = new TaipyDiagramModel(onClick);
         initDiagram(engine);
         let doLayout = false;
         if (displayModel) {
@@ -135,7 +140,7 @@ const ScenarioDag = (props: ScenarioDagProps) => {
         //engine.getActionEventBus().registerAction(new DeleteItemsAction({ keyCodes: [1] }));
         model.setLocked(true);
         doLayout && setTimeout(relayout, 500);
-    }, [displayModel, engine, relayout]);
+    }, [displayModel, engine, relayout, onClick]);
 
     useEffect(() => {
         const showVar = getUpdateVar(props.updateVars, "show");

+ 4 - 0
frontend/taipy/src/projectstorm/NodeWidget.tsx

@@ -86,6 +86,7 @@ namespace S {
 interface NodeProps {
     node: TaipyNodeModel;
     engine: DiagramEngine;
+    onAction?: string;
 }
 
 const getStatusLabel = (status?: TaskStatus) => status == TaskStatus.Running ? "Running" : status == TaskStatus.Pending ? "Pending" : undefined
@@ -110,6 +111,8 @@ const NodeWidget = ({ node, engine }: NodeProps) => {
         [engine]
     );
 
+    const onClick = useCallback(() => node.onClick && node.onClick(node.getID()), [node]);
+
     return (
         <S.Node
             data-default-node-name={node.getOptions().name}
@@ -117,6 +120,7 @@ const NodeWidget = ({ node, engine }: NodeProps) => {
             background={node.getOptions().color}
             title={getStatusLabel(node.status)}
             $status={node.status}
+            onClick={onClick}
         >
             <S.Title>
                 <S.TitleIcon className="icon" title={node.getType()}>

+ 13 - 4
frontend/taipy/src/projectstorm/models.ts

@@ -16,21 +16,30 @@ import { DefaultNodeModel, DefaultNodeModelOptions, DefaultPortModel, DefaultPor
 import { IN_PORT_NAME, OUT_PORT_NAME } from "../utils/diagram";
 import { getChildType } from "../utils/childtype";
 import { DataNode, Task } from "../utils/names";
-import { TaskStatus } from "../utils/types";
+import { OnClick, TaskStatus } from "../utils/types";
 
-export class TaipyDiagramModel extends DiagramModel {}
+export class TaipyDiagramModel extends DiagramModel {
+    onClick?: OnClick;
+    constructor(onClick?: OnClick) {
+        super();
+        this.onClick = onClick;
+    }
+}
 
 export interface TaipyNodeModelOptions extends DefaultNodeModelOptions {
     subtype?: string;
     status?: TaskStatus;
+    onClick?: OnClick;
 }
 export class TaipyNodeModel extends DefaultNodeModel {
-    subtype: string | undefined;
-    status: TaskStatus | undefined;
+    subtype?: string;
+    status?: TaskStatus;
+    onClick?: OnClick;
     constructor(options?: TaipyNodeModelOptions) {
         super(options);
         this.subtype = options?.subtype;
         this.status = options?.status
+        this.onClick = options?.onClick;
     }
 }
 

+ 4 - 3
frontend/taipy/src/utils/diagram.ts

@@ -26,7 +26,7 @@ import { getNodeColor } from "./config";
 import { TaipyDiagramModel, TaipyNodeModel } from "../projectstorm/models";
 import { TaipyNodeFactory, TaipyPortFactory } from "../projectstorm/factories";
 import { nodeTypes } from "./config";
-import { DisplayModel, TaskStatus, TaskStatuses } from "./types";
+import { DisplayModel, OnClick, TaskStatus, TaskStatuses } from "./types";
 
 export const createDagreEngine = () =>
     new DagreEngine({
@@ -59,7 +59,7 @@ export const getLinkId = (link: LinkModel) =>
     )}`;
 export const getNodeId = (node: DefaultNodeModel) => `${node.getType()}.${node.getID()}`;
 
-export const createNode = (nodeType: string, id: string, name: string, subtype: string, status?: TaskStatus) =>
+export const createNode = (nodeType: string, id: string, name: string, subtype: string, status?: TaskStatus, onClick?: OnClick) =>
     new TaipyNodeModel({
         id: id,
         type: nodeType,
@@ -67,6 +67,7 @@ export const createNode = (nodeType: string, id: string, name: string, subtype:
         color: getNodeColor(nodeType),
         subtype: subtype,
         status: status,
+        onClick: onClick,
     });
 
 export const createLink = (outPort: DefaultPortModel, inPort: DefaultPortModel) =>
@@ -147,7 +148,7 @@ export const populateModel = (displayModel: DisplayModel, model: TaipyDiagramMod
     displayModel[1] &&
         Object.entries(displayModel[1]).forEach(([nodeType, n]) => {
             Object.entries(n).forEach(([id, detail]) => {
-                const node = createNode(nodeType, id, detail.name, detail.type, detail.status);
+                const node = createNode(nodeType, id, detail.name, detail.type, detail.status, model.onClick);
                 nodeModels[nodeType] = nodeModels[nodeType] || {};
                 nodeModels[nodeType][id] = node;
             });

+ 4 - 0
frontend/taipy/src/utils/types.ts

@@ -27,3 +27,7 @@ export enum TaskStatus {
 }
 
 export type TaskStatuses = Record<string, TaskStatus>;
+
+export interface OnClick {
+    (id: string): void;
+}

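Taken together, the ScenarioDag, NodeWidget, models.ts and diagram.ts changes above thread a node-click callback from the diagram model down to each node: TaipyDiagramModel stores an OnClick handler, populateModel forwards it to createNode, and the node widget invokes it with the clicked node's id. A minimal sketch of that wiring, assuming the relative import paths of the files above apply (the logging handler and buildModel function are illustrative only):

import { TaipyDiagramModel } from "../projectstorm/models";
import { populateModel } from "./diagram";
import { DisplayModel } from "./types";

// Illustrative handler: receives the id of whichever node was clicked.
const onNodeClick = (id: string) => console.log("node clicked:", id);

const buildModel = (displayModel: DisplayModel): TaipyDiagramModel => {
    // The handler passed to the model constructor ends up on every TaipyNodeModel
    // created by populateModel, and NodeWidget calls it on click.
    const model = new TaipyDiagramModel(onNodeClick);
    populateModel(displayModel, model);
    return model;
};
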
+ 0 - 2
pyproject.toml

@@ -27,10 +27,8 @@ select = [
     "I001", # isort import order
 ]
 ignore = [      # TODO: to be removed
-    "C401",  # Unnecessary generator (rewrite as a `set` comprehension)
     "C408",  # Unnecessary dict call - rewrite as a literal
     "C409",  # Unnecessary list passed to tuple() - rewrite as a tuple literal
-    "C416",  # Unnecessary `set` comprehension (rewrite using `set()`)
 ]
 
 # Allow fix for all enabled rules (when `--fix`) is provided.

+ 1 - 1
taipy/config/version.json

@@ -1 +1 @@
-{"major": 3, "minor": 1, "patch": 0, "ext": "dev2"}
+{"major": 3, "minor": 2, "patch": 0, "ext": "dev0"}

+ 9 - 5
taipy/core/_core.py

@@ -62,6 +62,7 @@ class Core:
 
         self._manage_version_and_block_config()
         self.__start_dispatcher(force_restart)
+        self.__logger.info("Core service has been started.")
 
     def stop(self, wait: bool = True, timeout: Optional[float] = None):
         """
@@ -72,18 +73,17 @@ class Core:
             wait (bool): If True, the method will wait for the dispatcher to stop.
             timeout (Optional[float]): The maximum time to wait. If None, the method will wait indefinitely.
         """
-        self.__logger.info("Unblocking configuration update.")
+        self.__logger.info("Unblocking configuration update...")
         Config.unblock_update()
 
+        self.__logger.info("Stopping job dispatcher...")
         if self._dispatcher:
             self._dispatcher = _OrchestratorFactory._remove_dispatcher(wait, timeout)
-            self.__logger.info("Core service has been stopped.")
-
         with self.__class__.__lock_is_running:
             self.__class__._is_running = False
-
         with self.__class__.__lock_version_is_initialized:
             self.__class__._version_is_initialized = False
+        self.__logger.info("Core service has been stopped.")
 
     @classmethod
     def _manage_version_and_block_config(cls):
@@ -102,11 +102,13 @@ class Core:
 
     @classmethod
     def __update_core_section(cls):
+        cls.__logger.info("Updating configuration with command-line arguments...")
         _CoreCLI.create_parser()
         Config._applied_config._unique_sections[CoreSection.name]._update(_CoreCLI.parse_arguments())
 
     @classmethod
     def __manage_version(cls):
+        cls.__logger.info("Managing application's version...")
         _VersionManagerFactory._build_manager()._manage_version()
         Config._applied_config._unique_sections[CoreSection.name]._update(
             {"version_number": _VersionManagerFactory._build_manager()._get_latest_version()}
@@ -114,12 +116,14 @@ class Core:
 
     @classmethod
     def __check_and_block_config(cls):
+        cls.__logger.info("Checking application's version...")
         Config.check()
-        cls.__logger.info("Blocking configuration update.")
+        cls.__logger.info("Blocking configuration update...")
         Config.block_update()
         _init_backup_file_with_storage_folder()
 
     def __start_dispatcher(self, force_restart):
+        self.__logger.info("Starting job dispatcher...")
         if self._orchestrator is None:
             self._orchestrator = _OrchestratorFactory._build_orchestrator()
 

+ 2 - 3
taipy/core/_core_cli.py

@@ -113,6 +113,5 @@ class _CoreCLI:
     @classmethod
     def __add_taipy_prefix(cls, key: str):
         if key.startswith("--no-"):
-            return key[:5] + "taipy-" + key[5:]
-
-        return key[:2] + "taipy-" + key[2:]
+            return f"{key[:5]}taipy-{key[5:]}"
+        return f"{key[:2]}taipy-{key[2:]}"

+ 4 - 8
taipy/core/_entity/_dag.py

@@ -31,7 +31,7 @@ class _Edge:
 
 class _DAG:
     def __init__(self, dag: nx.DiGraph):
-        self._sorted_nodes = [nodes for nodes in nx.topological_generations(dag)]
+        self._sorted_nodes = list(nx.topological_generations(dag))
         self._length, self._width = self.__compute_size()
         self._grid_length, self._grid_width = self.__compute_grid_size()
         self._nodes = self.__compute_nodes()
@@ -54,7 +54,7 @@ class _DAG:
         return self._edges
 
     def __compute_size(self) -> Tuple[int, int]:
-        return len(self._sorted_nodes), max([len(i) for i in self._sorted_nodes])
+        return len(self._sorted_nodes), max(len(i) for i in self._sorted_nodes)
 
     def __compute_grid_size(self) -> Tuple[int, int]:
         if self._width == 1:
@@ -65,8 +65,7 @@ class _DAG:
 
     def __compute_nodes(self) -> Dict[str, _Node]:
         nodes = {}
-        x = 0
-        for same_lvl_nodes in self._sorted_nodes:
+        for x, same_lvl_nodes in enumerate(self._sorted_nodes):
             lcl_wdt = len(same_lvl_nodes)
             is_max = lcl_wdt != self.width
             if self.width != 1:
@@ -81,10 +80,7 @@ class _DAG:
         return nodes
 
     def __compute_edges(self, dag) -> List[_Edge]:
-        edges = []
-        for edge in dag.edges():
-            edges.append(_Edge(self.nodes[edge[0].id], self.nodes[edge[1].id]))
-        return edges
+        return [_Edge(self.nodes[edge[0].id], self.nodes[edge[1].id]) for edge in dag.edges()]
 
     @staticmethod
     def __lcm(*integers) -> int:

+ 1 - 1
taipy/core/_entity/_entity.py

@@ -23,7 +23,7 @@ class _Entity:
 
     def __enter__(self):
         self._is_in_context = True
-        self._in_context_attributes_changed_collector = list()
+        self._in_context_attributes_changed_collector = []
         return self
 
     def __exit__(self, exc_type, exc_value, exc_traceback):

+ 5 - 11
taipy/core/_entity/_labeled.py

@@ -52,26 +52,20 @@ class _Labeled:
         return self.__LABEL_SEPARATOR.join(ls)
 
     def _get_explicit_label(self) -> Optional[str]:
-        if hasattr(self, "_properties"):
-            return getattr(self, "_properties").get("label")  # noqa: B009
-        return None
+        return self._properties.get("label") if hasattr(self, "_properties") else None
 
     def _get_owner_id(self) -> Optional[str]:
-        if hasattr(self, "owner_id"):
-            return getattr(self, "owner_id")  # noqa: B009
-        return None
+        return self.owner_id if hasattr(self, "owner_id") else None
 
     def _get_name(self) -> Optional[str]:
         if hasattr(self, "name"):
-            return getattr(self, "name")  # noqa: B009
+            return self.name
         if hasattr(self, "_properties"):
-            return getattr(self, "_properties").get("name")  # noqa: B009
+            return self._properties.get("name")
         return None
 
     def _get_config_id(self) -> Optional[str]:
-        if hasattr(self, "config_id"):
-            return getattr(self, "config_id")  # noqa: B009
-        return None
+        return self.config_id if hasattr(self, "config_id") else None
 
     def _generate_entity_label(self) -> str:
         if name := self._get_name():

+ 2 - 2
taipy/core/_entity/_migrate/_utils.py

@@ -60,8 +60,8 @@ def __search_parent_config(entity_id: str, config: Dict, entity_type: str) -> Li
     possible_parents = "TASK" if entity_type == "DATA_NODE" else "SCENARIO"
     data = config[possible_parents]
 
+    section_id = f"{entity_id}:SECTION"
     for _id, entity_data in data.items():
-        section_id = f"{entity_id}:SECTION"
         if entity_type == "DATANODE" and possible_parents == "TASK":
             if section_id in entity_data["input_ids"] or section_id in entity_data["output_ids"]:
                 parents.append(section_id)
@@ -281,7 +281,7 @@ def __migrate_entities(entity_type: str, data: Dict) -> Dict:
     _entities = {k: data[k] for k in data if entity_type in k}
 
     for k, v in _entities.items():
-        if entity_type in ["JOB", "VERSION"]:
+        if entity_type in {"JOB", "VERSION"}:
             v["data"] = migration_fct(v["data"])  # type: ignore
         else:
             v["data"] = migration_fct(v["data"], data)  # type: ignore

+ 1 - 1
taipy/core/_entity/_migrate_cli.py

@@ -78,7 +78,7 @@ class _MigrateCLI:
         if args.remove_backup:
             cls.__handle_remove_backup(repository_type, repository_args)
 
-        do_backup = False if args.skip_backup else True
+        do_backup = not args.skip_backup
         cls.__migrate_entities(repository_type, repository_args, do_backup)
         sys.exit(0)
 

+ 5 - 8
taipy/core/_entity/_reload.py

@@ -22,10 +22,10 @@ class _Reloader:
 
     _no_reload_context = False
 
-    def __new__(class_, *args, **kwargs):
-        if not isinstance(class_._instance, class_):
-            class_._instance = object.__new__(class_, *args, **kwargs)
-        return class_._instance
+    def __new__(cls, *args, **kwargs):
+        if not isinstance(cls._instance, cls):
+            cls._instance = object.__new__(cls, *args, **kwargs)
+        return cls._instance
 
     def _reload(self, manager: str, obj):
         if self._no_reload_context:
@@ -66,10 +66,7 @@ def _self_setter(manager):
         def _do_set_entity(self, *args, **kwargs):
             fct(self, *args, **kwargs)
             entity_manager = _get_manager(manager)
-            if len(args) == 1:
-                value = args[0]
-            else:
-                value = args
+            value = args[0] if len(args) == 1 else args
             event = _make_event(
                 self,
                 EventOperation.UPDATE,

+ 3 - 4
taipy/core/_entity/submittable.py

@@ -139,8 +139,7 @@ class Submittable:
     def _remove_subscriber(self, callback: Callable, params: Optional[List[Any]] = None):
         if params is not None:
             self._subscribers.remove(_Subscriber(callback, params))
-        else:
-            elem = [x for x in self._subscribers if x.callback == callback]
-            if not elem:
-                raise ValueError
+        elif elem := [x for x in self._subscribers if x.callback == callback]:
             self._subscribers.remove(elem[0])
+        else:
+            raise ValueError

+ 10 - 0
taipy/core/_orchestrator/_abstract_orchestrator.py

@@ -21,6 +21,16 @@ from ..task.task import Task
 class _AbstractOrchestrator:
     """Creates, enqueues, and orchestrates jobs as instances of `Job^` class."""
 
+    @property
+    @abstractmethod
+    def jobs_to_run(self):
+        pass
+
+    @property
+    @abstractmethod
+    def blocked_jobs(self):
+        pass
+
     @classmethod
     @abstractmethod
     def initialize(cls):

+ 3 - 0
taipy/core/_orchestrator/_dispatcher/_development_job_dispatcher.py

@@ -23,6 +23,9 @@ class _DevelopmentJobDispatcher(_JobDispatcher):
     def __init__(self, orchestrator: _AbstractOrchestrator):
         super().__init__(orchestrator)
 
+    def _can_execute(self) -> bool:
+        return True
+
     def start(self):
         raise NotImplementedError
 

+ 13 - 18
taipy/core/_orchestrator/_dispatcher/_job_dispatcher.py

@@ -13,7 +13,7 @@ import threading
 import time
 from abc import abstractmethod
 from queue import Empty
-from typing import Dict, Optional
+from typing import Optional
 
 from taipy.config.config import Config
 from taipy.logger._taipy_logger import _TaipyLogger
@@ -29,9 +29,9 @@ class _JobDispatcher(threading.Thread):
     """Manages job dispatching (instances of `Job^` class) on executors."""
 
     _STOP_FLAG = False
-    _dispatched_processes: Dict = {}
+    stop_wait = True
+    stop_timeout = None
     _logger = _TaipyLogger._get_logger()
-    _nb_available_workers: int = 1
 
     def __init__(self, orchestrator: _AbstractOrchestrator):
         threading.Thread.__init__(self, name="Thread-Taipy-JobDispatcher")
@@ -55,13 +55,12 @@ class _JobDispatcher(threading.Thread):
             wait (bool): If True, the method will wait for the dispatcher to stop.
             timeout (Optional[float]): The maximum time to wait. If None, the method will wait indefinitely.
         """
+        self.stop_wait = wait
+        self.stop_timeout = timeout
         self._STOP_FLAG = True
-        if wait and self.is_alive():
-            self._logger.debug("Waiting for the dispatcher thread to stop...")
-            self.join(timeout=timeout)
 
     def run(self):
-        self._logger.info("Start job dispatcher...")
+        self._logger.debug("Job dispatcher started.")
         while not self._STOP_FLAG:
             try:
                 if self._can_execute():
@@ -77,11 +76,15 @@ class _JobDispatcher(threading.Thread):
             except Exception as e:
                 self._logger.exception(e)
                 pass
-        self._logger.info("Job dispatcher stopped.")
+        if self.stop_wait:
+            self._logger.debug("Waiting for the dispatcher thread to stop...")
+            self.join(timeout=self.stop_timeout)
+        self._logger.debug("Job dispatcher stopped.")
 
+    @abstractmethod
     def _can_execute(self) -> bool:
-        """Returns True if the dispatcher have resources to execute a new job."""
-        return self._nb_available_workers > 0
+        """Returns True if the dispatcher have resources to dispatch a new job."""
+        raise NotImplementedError
 
     def _execute_job(self, job: Job):
         if job.force or self._needs_to_run(job.task):
@@ -141,11 +144,3 @@ class _JobDispatcher(threading.Thread):
     def _update_job_status(job: Job, exceptions):
         job.update_status(exceptions)
         _JobManagerFactory._build_manager()._set(job)
-
-    @classmethod
-    def _set_dispatched_processes(cls, job_id, process):
-        cls._dispatched_processes[job_id] = process
-
-    @classmethod
-    def _pop_dispatched_process(cls, job_id, default=None):
-        return cls._dispatched_processes.pop(job_id, default)  # type: ignore

+ 5 - 3
taipy/core/_orchestrator/_dispatcher/_standalone_job_dispatcher.py

@@ -34,10 +34,14 @@ class _StandaloneJobDispatcher(_JobDispatcher):
         )  # type: ignore
         self._nb_available_workers = self._executor._max_workers  # type: ignore
 
+    def _can_execute(self) -> bool:
+        """Returns True if the dispatcher have resources to dispatch a job."""
+        return self._nb_available_workers > 0
+
     def run(self):
         with self._executor:
             super().run()
-        self._logger.info("Standalone job dispatcher: Pool executor shut down")
+        self._logger.debug("Standalone job dispatcher: Pool executor shut down")
 
     def _dispatch(self, job: Job):
         """Dispatches the given `Job^` on an available worker for execution.
@@ -50,7 +54,6 @@ class _StandaloneJobDispatcher(_JobDispatcher):
         config_as_string = _TomlSerializer()._serialize(Config._applied_config)  # type: ignore[attr-defined]
         future = self._executor.submit(_TaskFunctionWrapper(job.id, job.task), config_as_string=config_as_string)
 
-        self._set_dispatched_processes(job.id, future)  # type: ignore
         future.add_done_callback(self._release_worker)  # We must release the worker before updating the job status
         # so that the worker is available for another job as soon as possible.
         future.add_done_callback(partial(self._update_job_status_from_future, job))
@@ -59,5 +62,4 @@ class _StandaloneJobDispatcher(_JobDispatcher):
         self._nb_available_workers += 1
 
     def _update_job_status_from_future(self, job: Job, ft):
-        self._pop_dispatched_process(job.id)  # type: ignore
         self._update_job_status(job, ft.result())

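Note: the worker bookkeeping now lives entirely in the standalone dispatcher, and the worker is released in a done-callback registered before the status-update callback, so the slot frees up as early as possible. A self-contained sketch of that callback ordering with `concurrent.futures` (the counter and names are illustrative):

    from concurrent.futures import ProcessPoolExecutor
    from functools import partial

    available_workers = 2

    def release_worker(_future):
        global available_workers
        available_workers += 1  # free the slot first...

    def update_status(job_id, future):
        print(job_id, "finished with", future.result())  # ...then report the result

    if __name__ == "__main__":
        with ProcessPoolExecutor(max_workers=2) as executor:
            available_workers -= 1
            future = executor.submit(pow, 2, 10)
            future.add_done_callback(release_worker)              # registered first, runs first
            future.add_done_callback(partial(update_status, "job-1"))
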
+ 1 - 2
taipy/core/_orchestrator/_dispatcher/_task_function_wrapper.py

@@ -38,8 +38,7 @@ class _TaskFunctionWrapper:
     def execute(self, **kwargs):
         """Execute the wrapped function. If `config_as_string` is given, then it will be reapplied to the config."""
         try:
-            config_as_string = kwargs.pop("config_as_string", None)
-            if config_as_string:
+            if config_as_string := kwargs.pop("config_as_string", None):
                 Config._applied_config._update(_TomlSerializer()._deserialize(config_as_string))
                 Config.block_update()
 

+ 23 - 23
taipy/core/_orchestrator/_orchestrator.py

@@ -37,6 +37,7 @@ class _Orchestrator(_AbstractOrchestrator):
 
     jobs_to_run: Queue = Queue()
     blocked_jobs: List = []
+
     lock = Lock()
     __logger = _TaipyLogger._get_logger()
 
@@ -80,23 +81,22 @@ class _Orchestrator(_AbstractOrchestrator):
         tasks = submittable._get_sorted_tasks()
         with cls.lock:
             for ts in tasks:
-                for task in ts:
-                    jobs.append(
-                        cls._lock_dn_output_and_create_job(
-                            task,
-                            submission.id,
-                            submission.entity_id,
-                            callbacks=itertools.chain([cls._update_submission_status], callbacks or []),
-                            force=force,  # type: ignore
-                        )
+                jobs.extend(
+                    cls._lock_dn_output_and_create_job(
+                        task,
+                        submission.id,
+                        submission.entity_id,
+                        callbacks=itertools.chain([cls._update_submission_status], callbacks or []),
+                        force=force,  # type: ignore
                     )
+                    for task in ts
+                )
         submission.jobs = jobs  # type: ignore
         cls._orchestrate_job_to_run_or_block(jobs)
         if Config.job_config.is_development:
             cls._check_and_execute_jobs_if_development_mode()
-        else:
-            if wait:
-                cls._wait_until_job_finished(jobs, timeout=timeout or 0)
+        elif wait:
+            cls._wait_until_job_finished(jobs, timeout=timeout or 0)
         return submission
 
     @classmethod
@@ -157,11 +157,14 @@ class _Orchestrator(_AbstractOrchestrator):
     ) -> Job:
         for dn in task.output.values():
             dn.lock_edit()
-        job = _JobManagerFactory._build_manager()._create(
-            task, itertools.chain([cls._on_status_change], callbacks or []), submit_id, submit_entity_id, force=force
+        return _JobManagerFactory._build_manager()._create(
+            task,
+            itertools.chain([cls._on_status_change], callbacks or []),
+            submit_id,
+            submit_entity_id,
+            force=force
         )
 
-        return job
 
     @classmethod
     def _update_submission_status(cls, job: Job):
@@ -196,7 +199,7 @@ class _Orchestrator(_AbstractOrchestrator):
         while __check_if_timeout(start, timeout) and index < len(jobs):
             try:
                 if jobs[index]._is_finished():
-                    index = index + 1
+                    index += 1
                 else:
                     sleep(0.5)  # Limit CPU usage
             except Exception:
@@ -307,20 +310,17 @@ class _Orchestrator(_AbstractOrchestrator):
 
     @classmethod
     def _cancel_jobs(cls, job_id_to_cancel: JobId, jobs: Set[Job]):
-        from ._orchestrator_factory import _OrchestratorFactory
-
         for job in jobs:
-            if job.id in _OrchestratorFactory._dispatcher._dispatched_processes.keys():  # type: ignore
+            if job.is_running():
                 cls.__logger.info(f"{job.id} is running and cannot be canceled.")
             elif job.is_completed():
                 cls.__logger.info(f"{job.id} has already been completed and cannot be canceled.")
             elif job.is_skipped():
                 cls.__logger.info(f"{job.id} has already been skipped and cannot be canceled.")
+            elif job_id_to_cancel == job.id:
+                job.canceled()
             else:
-                if job_id_to_cancel == job.id:
-                    job.canceled()
-                else:
-                    job.abandoned()
+                job.abandoned()
 
     @staticmethod
     def _check_and_execute_jobs_if_development_mode():

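Note: with the `elif wait:` simplification, waiting only applies outside development mode; in development mode the orchestrator executes the jobs synchronously before returning. A hedged end-to-end sketch, assuming the public `taipy.submit` entry point forwards `wait`/`timeout` to the orchestrator:

    import taipy as tp
    from taipy import Config

    def double(nb: int) -> int:
        return nb * 2

    input_cfg = Config.configure_data_node("input", default_data=21)
    output_cfg = Config.configure_data_node("output")
    task_cfg = Config.configure_task("double", double, input_cfg, output_cfg)
    scenario_cfg = Config.configure_scenario("demo", [task_cfg])
    Config.configure_job_executions(mode="standalone", max_nb_of_workers=2)

    if __name__ == "__main__":
        tp.Core().run()
        scenario = tp.create_scenario(scenario_cfg)
        # wait/timeout only matter in standalone mode; in development mode the
        # orchestrator runs the jobs synchronously before submit() returns.
        tp.submit(scenario, wait=True, timeout=60)
        print(scenario.output.read())  # 42
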
+ 1 - 2
taipy/core/_repository/_base_taipy_model.py

@@ -24,8 +24,7 @@ class _BaseModel:
     __table__: Table
 
     def __iter__(self):
-        for attr, value in self.__dict__.items():
-            yield attr, value
+        yield from self.__dict__.items()
 
     def to_dict(self) -> Dict[str, Any]:
         model_dict = {**dataclasses.asdict(self)}  # type: ignore[call-overload]

+ 4 - 5
taipy/core/_repository/_encoder.py

@@ -27,14 +27,13 @@ class _Encoder(json.JSONEncoder):
 
     def default(self, o: Any):
         if isinstance(o, Enum):
-            result = o.value
+            return o.value
         elif isinstance(o, datetime):
-            result = {"__type__": "Datetime", "__value__": o.isoformat()}
+            return {"__type__": "Datetime", "__value__": o.isoformat()}
         elif isinstance(o, timedelta):
-            result = {"__type__": "Timedelta", "__value__": self._timedelta_to_str(o)}
+            return {"__type__": "Timedelta", "__value__": self._timedelta_to_str(o)}
         else:
-            result = json.JSONEncoder.default(self, o)
-        return result
+            return json.JSONEncoder.default(self, o)
 
 
 def dumps(d):

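Note: the encoder now returns directly from each branch. For reference, a small self-contained sketch of the same `__type__` / `__value__` tagging convention with a matching decoder hook, independent of the Taipy classes (the timedelta is serialized as total seconds here, which is an illustrative stand-in for `_timedelta_to_str`):

    import json
    from datetime import datetime, timedelta

    class Encoder(json.JSONEncoder):
        def default(self, o):
            if isinstance(o, datetime):
                return {"__type__": "Datetime", "__value__": o.isoformat()}
            if isinstance(o, timedelta):
                return {"__type__": "Timedelta", "__value__": str(o.total_seconds())}
            return super().default(o)

    def decode(d):
        if d.get("__type__") == "Datetime":
            return datetime.fromisoformat(d["__value__"])
        if d.get("__type__") == "Timedelta":
            return timedelta(seconds=float(d["__value__"]))
        return d

    payload = json.dumps({"at": datetime(2024, 1, 1), "ttl": timedelta(minutes=5)}, cls=Encoder)
    restored = json.loads(payload, object_hook=decode)  # round-trips back to datetime/timedelta
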
+ 2 - 6
taipy/core/_repository/_filesystem_repository.py

@@ -170,10 +170,7 @@ class _FileSystemRepository(_AbstractRepository[ModelType, Entity]):
     def _get_by_config_and_owner_id(
         self, config_id: str, owner_id: Optional[str], filters: Optional[List[Dict]] = None
     ) -> Optional[Entity]:
-        if not filters:
-            filters = [{}]
-        else:
-            filters = copy.deepcopy(filters)
+        filters = [{}] if not filters else copy.deepcopy(filters)
 
         if owner_id is not None:
             for fil in filters:
@@ -225,8 +222,7 @@ class _FileSystemRepository(_AbstractRepository[ModelType, Entity]):
         if isinstance(file_content, str):
             file_content = json.loads(file_content, cls=_Decoder)
         model = self.model_type.from_dict(file_content)
-        entity = self.converter._model_to_entity(model)
-        return entity
+        return self.converter._model_to_entity(model)
 
     def __filter_by(self, filepath: pathlib.Path, filters: Optional[List[Dict]]) -> Optional[Json]:
         if not filters:

+ 2 - 1
taipy/core/data/_abstract_sql.py

@@ -117,6 +117,7 @@ class _AbstractSQLDataNode(DataNode, _AbstractTabularDataNode):
             editor_expiration_date,
             **properties,
         )
+        _AbstractTabularDataNode.__init__(self, **properties)
         self._engine = None
         if not self._last_edit_date:  # type: ignore
             self._last_edit_date = datetime.now()
@@ -230,7 +231,7 @@ class _AbstractSQLDataNode(DataNode, _AbstractTabularDataNode):
 
             # On pandas 1.3.5 there's a bug that makes that the dataframe from sqlalchemy query is
             # created without headers
-            keys = [col for col in result.keys()]
+            keys = list(result.keys())
             if columns:
                 return pd.DataFrame(result, columns=keys)[columns]
             return pd.DataFrame(result, columns=keys)

+ 9 - 6
taipy/core/data/parquet.py

@@ -158,6 +158,8 @@ class ParquetDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode)
             editor_expiration_date,
             **properties,
         )
+        _AbstractTabularDataNode.__init__(self, **properties)
+
         self._path = properties.get(self.__PATH_KEY, properties.get(self.__DEFAULT_PATH_KEY))
 
         if self._path and ".data" in self._path:
@@ -249,13 +251,14 @@ class ParquetDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode)
         }
         kwargs.update(self.properties[self.__WRITE_KWARGS_PROPERTY])
         kwargs.update(write_kwargs)
-        if isinstance(data, pd.DataFrame):
-            data.to_parquet(self._path, **kwargs)
+        if isinstance(data, pd.Series):
+            df = pd.DataFrame(data)
         else:
-            _df = pd.DataFrame(data)
-            # Ensure that the columns are strings, otherwise writing will fail with pandas 1.3.5
-            _df.columns = _df.columns.astype(str)
-            _df.to_parquet(self._path, **kwargs)
+            df = self._convert_data_to_dataframe(self.properties[self._EXPOSED_TYPE_PROPERTY], data)
+
+        # Ensure that the columns are strings, otherwise writing will fail with pandas 1.3.5
+        df.columns = df.columns.astype(str)
+        df.to_parquet(self._path, **kwargs)
         self.track_edit(timestamp=datetime.now(), job_id=job_id)
 
     def read_with_kwargs(self, **read_kwargs):

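Note: the write path now funnels everything that is not already a Series through a single DataFrame conversion and always stringifies column labels before writing. A minimal sketch of that normalization (`_convert_data_to_dataframe` is Taipy-internal, so a plain `pd.DataFrame(...)` stands in for it here; writing parquet requires `pyarrow` or `fastparquet`):

    import pandas as pd

    data = [{"a": 1, "b": 2}, {"a": 3, "b": 4}]  # could also be a Series, ndarray, dict...
    df = data.to_frame() if isinstance(data, pd.Series) else pd.DataFrame(data)

    # pandas 1.3.5 fails to write parquet files when column labels are not strings
    df.columns = df.columns.astype(str)
    df.to_parquet("example.parquet", engine="pyarrow", compression="snappy")
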
+ 8 - 38
taipy/core/data/sql_table.py

@@ -10,9 +10,8 @@
 # specific language governing permissions and limitations under the License.
 
 from datetime import datetime, timedelta
-from typing import Any, Dict, List, Optional, Set, Tuple, Union
+from typing import Any, Dict, List, Optional, Set
 
-import numpy as np
 import pandas as pd
 from sqlalchemy import MetaData, Table
 
@@ -123,26 +122,12 @@ class SQLTableDataNode(_AbstractSQLDataNode):
 
     def __insert_data(self, data, engine, connection, delete_table: bool = False) -> None:
         table = self._create_table(engine)
-        if isinstance(data, pd.DataFrame):
-            self.__insert_dataframe(data, table, connection, delete_table)
-            return
-
-        if isinstance(data, np.ndarray):
-            data = data.tolist()
-        if not isinstance(data, list):
-            data = [data]
-
-        if len(data) == 0:
-            self.__delete_all_rows(table, connection, delete_table)
-            return
-
-        if isinstance(data[0], (tuple, list)):
-            self.__insert_tuples(data, table, connection, delete_table)
-        elif isinstance(data[0], dict):
-            self.__insert_dicts(data, table, connection, delete_table)
-        # If data is a primitive type, it will be inserted as a tuple of one element.
-        else:
-            self.__insert_tuples([(x,) for x in data], table, connection, delete_table)
+        self.__insert_dataframe(
+            self._convert_data_to_dataframe(self.properties[self._EXPOSED_TYPE_PROPERTY], data),
+            table,
+            connection,
+            delete_table,
+        )
 
     def _create_table(self, engine) -> Table:
         return Table(
@@ -161,24 +146,9 @@ class SQLTableDataNode(_AbstractSQLDataNode):
         connection.execute(table.insert(), data)
 
     @classmethod
-    def __insert_dataframe(
-        cls, df: pd.DataFrame, table: Any, connection: Any, delete_table: bool
-    ) -> None:
+    def __insert_dataframe(cls, df: pd.DataFrame, table: Any, connection: Any, delete_table: bool) -> None:
         cls.__insert_dicts(df.to_dict(orient="records"), table, connection, delete_table)
 
-    @classmethod
-    def __insert_tuples(cls, data: List[Union[Tuple, List]], table: Any, connection: Any, delete_table: bool) -> None:
-        """
-        This method will look up the length of the first object of the list and build the insert through
-        creation of a string of '?' equivalent to the length of the element. The '?' character is used as
-        placeholder for a tuple of same size.
-        """
-        cls.__delete_all_rows(table, connection, delete_table)
-        markers = ",".join("?" * len(data[0]))
-        ins = "INSERT INTO {tablename} VALUES ({markers})"
-        ins = ins.format(tablename=table.name, markers=markers)
-        connection.execute(ins, data)
-
     @classmethod
     def __delete_all_rows(cls, table: Any, connection: Any, delete_table: bool) -> None:
         if delete_table:

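Note: with the tuple/dict/scalar branches removed, every write is normalized to a DataFrame and inserted as a list of row dictionaries. A sketch of that normalization and the `to_dict(orient="records")` step using SQLAlchemy directly (engine URL, table and column names are illustrative):

    import pandas as pd
    from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine

    engine = create_engine("sqlite:///:memory:")
    metadata = MetaData()
    users = Table("users", metadata, Column("id", Integer), Column("name", String))
    metadata.create_all(engine)

    data = [(1, "Alice"), (2, "Bob")]  # tuples, dicts, scalars... all become a DataFrame
    df = pd.DataFrame(data, columns=["id", "name"])

    with engine.begin() as connection:
        # executemany-style insert from the DataFrame's records
        connection.execute(users.insert(), df.to_dict(orient="records"))
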
+ 0 - 3
taipy/core/job/_job_manager.py

@@ -61,9 +61,6 @@ class _JobManager(_Manager[Job], _VersionMixin):
     def _delete(cls, job: Job, force=False):
         if cls._is_deletable(job) or force:
             super()._delete(job.id)
-            from .._orchestrator._dispatcher._job_dispatcher import _JobDispatcher
-
-            _JobDispatcher._pop_dispatched_process(job.id)
         else:
             err = JobNotDeletedException(job.id)
             cls._logger.error(err)

+ 16 - 13
taipy/core/scenario/scenario.py

@@ -109,11 +109,9 @@ class Scenario(_Entity, Submittable, _Labeled):
         self._properties = _Properties(self, **properties)
         self._sequences: Dict[str, Dict] = sequences or {}
 
-        _scenario_task_ids = set(task.id if isinstance(task, Task) else task for task in self._tasks)
+        _scenario_task_ids = {task.id if isinstance(task, Task) else task for task in self._tasks}
         for sequence_name, sequence_data in self._sequences.items():
-            sequence_task_ids = set(
-                task.id if isinstance(task, Task) else task for task in sequence_data.get("tasks", [])
-            )
+            sequence_task_ids = {task.id if isinstance(task, Task) else task for task in sequence_data.get("tasks", [])}
             self.__check_sequence_tasks_exist_in_scenario_tasks(
                 sequence_name, sequence_task_ids, self.id, _scenario_task_ids
             )
@@ -233,10 +231,11 @@ class Scenario(_Entity, Submittable, _Labeled):
         subscribers: Optional[List[_Subscriber]] = None,
     ) -> Sequence:
         _scenario = _Reloader()._reload(self._MANAGER_NAME, self)
-        _scenario_task_ids = set(task.id if isinstance(task, Task) else task for task in _scenario._tasks)
-        _sequence_task_ids: Set[TaskId] = set(task.id if isinstance(task, Task) else task for task in tasks)
+        _scenario_task_ids = {task.id if isinstance(task, Task) else task for task in _scenario._tasks}
+        _sequence_task_ids: Set[TaskId] = {task.id if isinstance(task, Task) else task for task in tasks}
         self.__check_sequence_tasks_exist_in_scenario_tasks(name, _sequence_task_ids, self.id, _scenario_task_ids)
         from taipy.core.sequence._sequence_manager_factory import _SequenceManagerFactory
+
         seq_manager = _SequenceManagerFactory._build_manager()
         seq = seq_manager._create(name, tasks, subscribers or [], properties or {}, self.id, self.version)
         if not seq._is_consistent():
@@ -270,9 +269,9 @@ class Scenario(_Entity, Submittable, _Labeled):
             SequenceTaskDoesNotExistInScenario^: If a task in the sequence does not exist in the scenario.
         """
         _scenario = _Reloader()._reload(self._MANAGER_NAME, self)
-        _sc_task_ids = set(task.id if isinstance(task, Task) else task for task in _scenario._tasks)
+        _sc_task_ids = {task.id if isinstance(task, Task) else task for task in _scenario._tasks}
         for name, tasks in sequences.items():
-            _seq_task_ids: Set[TaskId] = set(task.id if isinstance(task, Task) else task for task in tasks)
+            _seq_task_ids: Set[TaskId] = {task.id if isinstance(task, Task) else task for task in tasks}
             self.__check_sequence_tasks_exist_in_scenario_tasks(name, _seq_task_ids, self.id, _sc_task_ids)
         # Need to parse twice the sequences to avoid adding some sequences and not others in case of exception
         for name, tasks in sequences.items():
@@ -327,11 +326,15 @@ class Scenario(_Entity, Submittable, _Labeled):
         self._sequences[new_name] = self._sequences[old_name]
         del self._sequences[old_name]
         self.sequences = self._sequences  # type: ignore
-        Notifier.publish(Event(EventEntityType.SCENARIO,
-                               EventOperation.UPDATE,
-                               entity_id=self.id,
-                               attribute_name="sequences",
-                               attribute_value=self._sequences))
+        Notifier.publish(
+            Event(
+                EventEntityType.SCENARIO,
+                EventOperation.UPDATE,
+                entity_id=self.id,
+                attribute_name="sequences",
+                attribute_value=self._sequences,
+            )
+        )
 
     @staticmethod
     def __check_sequence_tasks_exist_in_scenario_tasks(

+ 2 - 0
taipy/core/setup.py

@@ -36,6 +36,8 @@ def get_requirements():
     # get requirements from the different setups in tools/packages (removing taipy packages)
     reqs = set()
     for pkg in (root_folder / "tools" / "packages").iterdir():
+        if "taipy-core" not in str(pkg):
+            continue
         requirements_file = pkg / "setup.requirements.txt"
         if requirements_file.exists():
             reqs.update(requirements_file.read_text("UTF-8").splitlines())

+ 1 - 1
taipy/core/version.json

@@ -1 +1 @@
-{"major": 3, "minor": 1, "patch": 0, "ext": "dev2"}
+{"major": 3, "minor": 2, "patch": 0, "ext": "dev0"}

+ 2 - 0
taipy/gui/_renderers/builder.py

@@ -775,6 +775,8 @@ class _Builder:
                 else:
                     self.__set_default_value(var_name, var_type=var_type)
         else:
+            if var_type == PropertyType.data:
+                _warn(f"{self.__control_type}.data property should be bound.")
             value = self.__attributes.get(var_name)
             if value is not None:
                 if native_type:

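Note: the new warning fires when a control's `data` property receives a literal value instead of a bound variable. A small sketch of the bound form in Taipy's Markdown syntax (the `sales` variable name is illustrative):

    import pandas as pd
    from taipy.gui import Gui, Markdown

    sales = pd.DataFrame({"quarter": ["Q1", "Q2"], "revenue": [120, 150]})

    # Bound: the table's data property references the `sales` variable, so state
    # updates propagate to the control and no warning is emitted.
    page = Markdown("<|{sales}|table|>")

    if __name__ == "__main__":
        Gui(page).run()
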
+ 2 - 0
taipy/gui/_renderers/factory.py

@@ -225,6 +225,7 @@ class _Factory:
                 ("extensions",),
                 ("drop_message",),
                 ("hover_text", PropertyType.dynamic_string),
+                ("notify", PropertyType.boolean, True),
             ]
         ),
         "image": lambda gui, control_type, attrs: _Builder(
@@ -490,6 +491,7 @@ class _Factory:
                 ("filter", PropertyType.boolean),
                 ("hover_text", PropertyType.dynamic_string),
                 ("size",),
+                ("downloadable", PropertyType.boolean),
             ]
         )
         ._set_propagate()

+ 4 - 1
taipy/gui/_renderers/json.py

@@ -12,10 +12,11 @@ from __future__ import annotations
 
 import typing as t
 from abc import ABC, abstractmethod
-from datetime import date, datetime, time
+from datetime import date, datetime, time, timedelta
 from json import JSONEncoder
 from pathlib import Path
 
+import pandas
 from flask.json.provider import DefaultJSONProvider
 
 from .._warnings import _warn
@@ -45,6 +46,8 @@ class _DefaultJsonAdapter(JsonAdapter):
             return _date_to_string(o)
         if isinstance(o, Path):
             return str(o)
+        if isinstance(o, (timedelta, pandas.Timedelta)):
+            return str(o)
 
 
 class _TaipyJsonAdapter(object, metaclass=_Singleton):

+ 8 - 1
taipy/gui/custom/_page.py

@@ -23,13 +23,20 @@ class Page(BasePage):
     A custom page for external application that can be added to Taipy GUI"""
 
     def __init__(
-        self, resource_handler: ResourceHandler, binding_variables: t.Optional[t.List[str]] = None, **kwargs
+        self,
+        resource_handler: ResourceHandler,
+        binding_variables: t.Optional[t.List[str]] = None,
+        metadata: t.Optional[t.Dict[str, t.Any]] = None,
+        **kwargs,
     ) -> None:
         if binding_variables is None:
             binding_variables = []
+        if metadata is None:
+            metadata = {}
         super().__init__(**kwargs)
         self._resource_handler = resource_handler
         self._binding_variables = binding_variables
+        self._metadata: t.Dict[str, t.Any] = metadata
 
 
 class ResourceHandler(ABC):

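Note: custom pages can now carry arbitrary metadata, forwarded to the frontend through the new `tp_cp_meta` query parameter (see the `gui.py` and `server.py` hunks below). A hedged construction sketch, assuming `MyResourceHandler` is a concrete `ResourceHandler` implementation defined elsewhere and that the `taipy.gui.custom` package re-exports `Page`:

    from taipy.gui.custom import Page

    page = Page(
        resource_handler=MyResourceHandler(),          # assumed concrete ResourceHandler subclass
        binding_variables=["count", "selection"],      # illustrative variable names
        metadata={"title": "External dashboard", "version": 2},
    )
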
+ 6 - 1
taipy/gui/data/pandas_data_accessor.py

@@ -262,6 +262,7 @@ class _PandasDataAccessor(_DataAccessor):
             except Exception as e:
                 _warn(f"Dataframe filtering: invalid query '{query}' on {value.head()}", e)
 
+        dictret: t.Optional[t.Dict[str, t.Any]]
         if paged:
             aggregates = payload.get("aggregates")
             applies = payload.get("applies")
@@ -375,7 +376,11 @@ class _PandasDataAccessor(_DataAccessor):
                         except Exception as e:
                             _warn(f"Limit rows error with {decimator} for Dataframe", e)
             value = self.__build_transferred_cols(gui, columns, t.cast(pd.DataFrame, value), is_copied=is_copied)
-            dictret = self.__format_data(value, data_format, "list", data_extraction=True)
+            if payload.get("csv") is True:
+                ret_payload["df"] = value
+                dictret = None
+            else:
+                dictret = self.__format_data(value, data_format, "list", data_extraction=True)
         ret_payload["value"] = dictret
         return ret_payload
 

+ 15 - 8
taipy/gui/data/utils.py

@@ -118,7 +118,7 @@ def _df_relayout(
     if chart_mode not in ["lines+markers", "markers"]:
         return dataframe, is_copied
     # if chart data is invalid
-    if x0 is None or x1 is None or y0 is None or y1 is None:
+    if x0 is None and x1 is None and y0 is None and y1 is None:
         return dataframe, is_copied
     df = dataframe.copy() if not is_copied else dataframe
     is_copied = True
@@ -132,13 +132,20 @@ def _df_relayout(
         df[x_column] = df.index
         has_x_col = False
 
-    # if chart_mode is empty
-    if chart_mode == "lines+markers":
-        # only filter by x column
-        df = df.loc[(df[x_column] > x0) & (df[x_column] < x1)]
-    else:
-        # filter by both x and y columns
-        df = df.loc[(df[x_column] > x0) & (df[x_column] < x1) & (df[y_column] > y0) & (df[y_column] < y1)]  # noqa
+    df_filter_conditions = []
+    # filter by x column by default
+    if x0 is not None:
+        df_filter_conditions.append(df[x_column] > x0)
+    if x1 is not None:
+        df_filter_conditions.append(df[x_column] < x1)
+    # y column will be filtered only if chart_mode is not lines+markers (eg. markers)
+    if chart_mode != "lines+markers":
+        if y0 is not None:
+            df_filter_conditions.append(df[y_column] > y0)
+        if y1 is not None:
+            df_filter_conditions.append(df[y_column] < y1)
+    if df_filter_conditions:
+        df = df.loc[np.bitwise_and.reduce(df_filter_conditions)]
     if not has_x_col:
         df.drop(x_column, axis=1, inplace=True)
     return df, is_copied

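Note: since any of the four relayout bounds can now be missing individually, the filter is built as a list of boolean Series and combined in a single reduction. A self-contained sketch of that pattern:

    import numpy as np
    import pandas as pd

    df = pd.DataFrame({"x": [0, 1, 2, 3, 4], "y": [10, 20, 30, 40, 50]})
    x0, x1, y0, y1 = 0, 4, None, 45  # a partial zoom box: y0 is unknown

    conditions = []
    if x0 is not None:
        conditions.append(df["x"] > x0)
    if x1 is not None:
        conditions.append(df["x"] < x1)
    if y0 is not None:
        conditions.append(df["y"] > y0)
    if y1 is not None:
        conditions.append(df["y"] < y1)

    if conditions:
        # bitwise_and.reduce combines any number of boolean Series element-wise
        df = df.loc[np.bitwise_and.reduce(conditions)]
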
+ 64 - 11
taipy/gui/gui.py

@@ -17,7 +17,6 @@ import inspect
 import json
 import math
 import os
-import pathlib
 import re
 import sys
 import tempfile
@@ -26,12 +25,24 @@ import typing as t
 import warnings
 from importlib import metadata, util
 from importlib.util import find_spec
+from pathlib import Path
+from tempfile import mkstemp
 from types import FrameType, FunctionType, LambdaType, ModuleType, SimpleNamespace
 from urllib.parse import unquote, urlencode, urlparse
 
 import markdown as md_lib
 import tzlocal
-from flask import Blueprint, Flask, g, has_app_context, jsonify, request, send_file, send_from_directory
+from flask import (
+    Blueprint,
+    Flask,
+    g,
+    has_app_context,
+    has_request_context,
+    jsonify,
+    request,
+    send_file,
+    send_from_directory,
+)
 from werkzeug.utils import secure_filename
 
 import __main__  # noqa: F401
@@ -214,6 +225,8 @@ class Gui:
     _HTML_CONTENT_KEY = "__taipy_html_content"
     __USER_CONTENT_CB = "custom_user_content_cb"
     __ROBOTO_FONT = "https://fonts.googleapis.com/css?family=Roboto:300,400,500,700&display=swap"
+    __DOWNLOAD_ACTION = "__Taipy__download_csv"
+    __DOWNLOAD_DELETE_ACTION = "__Taipy__download_delete_csv"
 
     __RE_HTML = re.compile(r"(.*?)\.html$")
     __RE_MD = re.compile(r"(.*?)\.md$")
@@ -332,7 +345,7 @@ class Gui:
 
         # get taipy version
         try:
-            gui_file = pathlib.Path(__file__ or ".").resolve()
+            gui_file = Path(__file__ or ".").resolve()
             with open(gui_file.parent / "version.json") as version_file:
                 self.__version = json.load(version_file)
         except Exception as e:  # pragma: no cover
@@ -888,7 +901,7 @@ class Gui:
                     elts.append(elt_dict)
         status.update({"libraries": libraries})
 
-    def _serve_status(self, template: pathlib.Path) -> t.Dict[str, t.Dict[str, str]]:
+    def _serve_status(self, template: Path) -> t.Dict[str, t.Dict[str, str]]:
         base_json: t.Dict[str, t.Any] = {"user_status": str(self.__call_on_status() or "")}
         if self._get_config("extended_status", False):
             base_json.update(
@@ -932,7 +945,7 @@ class Gui:
                 suffix = f".part.{part}"
                 complete = part == total - 1
         if file:  # and allowed_file(file.filename)
-            upload_path = pathlib.Path(self._get_config("upload_folder", tempfile.gettempdir())).resolve()
+            upload_path = Path(self._get_config("upload_folder", tempfile.gettempdir())).resolve()
             file_path = _get_non_existent_file_path(upload_path, secure_filename(file.filename))
             file.save(str(upload_path / (file_path.name + suffix)))
             if complete:
@@ -1002,7 +1015,13 @@ class Gui:
                 elif isinstance(newvalue, _TaipyToJson):
                     newvalue = newvalue.get()
                 if isinstance(newvalue, (dict, _MapDict)):
-                    continue  # this var has no transformer
+                    # Skip in taipy-gui, available in custom frontend
+                    resource_handler_id = None
+                    with contextlib.suppress(Exception):
+                        if has_request_context():
+                            resource_handler_id = request.cookies.get(_Server._RESOURCE_HANDLER_ARG, None)
+                    if resource_handler_id is None:
+                        continue  # this var has no transformer
                 if isinstance(newvalue, float) and math.isnan(newvalue):
                     # do not let NaN go through json, it is not handle well (dies silently through websocket)
                     newvalue = None
@@ -1304,6 +1323,31 @@ class Gui:
             cls = self.__locals_context.get_default().get(class_name)
         return cls if isinstance(cls, class_type) else class_name
 
+    def __download_csv(self, state: State, var_name: str, payload: dict):
+        holder_name = t.cast(str, payload.get("var_name"))
+        ret = self._accessors._get_data(
+            self,
+            holder_name,
+            _getscopeattr(self, holder_name, None),
+            {"alldata": True, "csv": True},
+        )
+        if isinstance(ret, dict):
+            df = ret.get("df")
+            try:
+                fd, temp_path = mkstemp(".csv", var_name, text=True)
+                with os.fdopen(fd, "wt", newline="") as csv_file:
+                    df.to_csv(csv_file, index=False)  # type:ignore
+                self._download(temp_path, "data.csv", Gui.__DOWNLOAD_DELETE_ACTION)
+            except Exception as e:  # pragma: no cover
+                if not self._call_on_exception("download_csv", e):
+                    _warn("download_csv(): Exception raised", e)
+
+    def __delete_csv(self, state: State, var_name: str, payload: dict):
+        try:
+            (Path(tempfile.gettempdir()) / t.cast(str, payload.get("args", [])[-1]).split("/")[-1]).unlink(True)
+        except Exception:
+            pass
+
     def __on_action(self, id: t.Optional[str], payload: t.Any) -> None:
         if isinstance(payload, dict):
             action = payload.get("action")
@@ -1311,7 +1355,15 @@ class Gui:
             action = str(payload)
             payload = {"action": action}
         if action:
-            if self.__call_function_with_args(action_function=self._get_user_function(action), id=id, payload=payload):
+            action_fn: t.Union[t.Callable, str]
+            if Gui.__DOWNLOAD_ACTION == action:
+                action_fn = self.__download_csv
+                payload["var_name"] = id
+            elif Gui.__DOWNLOAD_DELETE_ACTION == action:
+                action_fn = self.__delete_csv
+            else:
+                action_fn = self._get_user_function(action)
+            if self.__call_function_with_args(action_function=action_fn, id=id, payload=payload):
                 return
             else:  # pragma: no cover
                 _warn(f"on_action(): '{action}' is not a valid function.")
@@ -1895,7 +1947,7 @@ class Gui:
             else:
                 _warn("download() on_action is invalid.")
         content_str = self._get_content("Gui.download", content, False)
-        self.__send_ws_download(content_str, str(name), str(on_action))
+        self.__send_ws_download(content_str, str(name), str(on_action) if on_action is not None else "")
 
     def _notify(
         self,
@@ -2062,6 +2114,7 @@ class Gui:
                 to=page_name,
                 params={
                     _Server._RESOURCE_HANDLER_ARG: pr._resource_handler.get_id(),
+                    _Server._CUSTOM_PAGE_META_ARG: json.dumps(pr._metadata, cls=_TaipyJsonEncoder)
                 },
             ):
                 # Proactively handle the bindings of custom page variables
@@ -2117,7 +2170,7 @@ class Gui:
 
     def _set_css_file(self, css_file: t.Optional[str] = None):
         if css_file is None:
-            script_file = pathlib.Path(self.__frame.f_code.co_filename or ".").resolve()
+            script_file = Path(self.__frame.f_code.co_filename or ".").resolve()
             if script_file.with_suffix(".css").exists():
                 css_file = f"{script_file.stem}.css"
             elif script_file.is_dir() and (script_file / "taipy.css").exists():
@@ -2130,9 +2183,9 @@ class Gui:
 
     def _get_webapp_path(self):
         _conf_webapp_path = (
-            pathlib.Path(self._get_config("webapp_path", None)) if self._get_config("webapp_path", None) else None
+            Path(self._get_config("webapp_path", None)) if self._get_config("webapp_path", None) else None
         )
-        _webapp_path = str((pathlib.Path(__file__).parent / "webapp").resolve())
+        _webapp_path = str((Path(__file__).parent / "webapp").resolve())
         if _conf_webapp_path:
             if _conf_webapp_path.is_dir():
                 _webapp_path = str(_conf_webapp_path.resolve())

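Note: the built-in `__Taipy__download_csv` action writes the table's data to a temporary CSV file, streams it to the browser, and removes the file through the follow-up delete action. A hedged sketch of the same idea at user level, using the public `taipy.gui.download` helper (the callback name and the bound `sales` variable are illustrative):

    import os
    from tempfile import mkstemp

    from taipy.gui import download

    def export_csv(state):
        """Illustrative callback: dump the bound dataframe to a temp file and push it to the browser."""
        fd, temp_path = mkstemp(suffix=".csv", text=True)
        with os.fdopen(fd, "wt", newline="") as csv_file:
            state.sales.to_csv(csv_file, index=False)  # `sales` is an assumed bound DataFrame
        download(state, content=temp_path, name="data.csv")
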
+ 3 - 0
taipy/gui/page.py

@@ -46,6 +46,9 @@ class Page:
             self._frame = self._renderer._frame
         elif isinstance(self, CustomPage):
             self._frame = t.cast(FrameType, t.cast(FrameType, inspect.stack()[2].frame))
+            # Allow CustomPage class to be inherited
+            if len(inspect.stack()) > 3 and inspect.stack()[2].function != "<module>":
+                self._frame = t.cast(FrameType, t.cast(FrameType, inspect.stack()[3].frame))
         elif len(inspect.stack()) < 4:
             raise RuntimeError(f"Can't resolve module. Page '{type(self).__name__}' is not registered.")
         else:

+ 1 - 0
taipy/gui/server.py

@@ -48,6 +48,7 @@ class _Server:
     __OPENING_CURLY = r"\1&#x7B;"
     __CLOSING_CURLY = r"&#x7D;\2"
     _RESOURCE_HANDLER_ARG = "tprh"
+    _CUSTOM_PAGE_META_ARG = "tp_cp_meta"
 
     def __init__(
         self,

+ 2 - 0
taipy/gui/setup.py

@@ -36,6 +36,8 @@ def get_requirements():
     # get requirements from the different setups in tools/packages (removing taipy packages)
     reqs = set()
     for pkg in (root_folder / "tools" / "packages").iterdir():
+        if "taipy-gui" not in str(pkg):
+            continue
         requirements_file = pkg / "setup.requirements.txt"
         if requirements_file.exists():
             reqs.update(requirements_file.read_text("UTF-8").splitlines())

+ 1 - 1
taipy/gui/version.json

@@ -1 +1 @@
-{"major": 3, "minor": 1, "patch": 0, "ext": "dev2"}
+{"major": 3, "minor": 2, "patch": 0, "ext": "dev0"}

+ 12 - 1
taipy/gui/viselements.json

@@ -667,6 +667,12 @@
             "type": "str",
             "default_value": "\"Drop here to Upload\"",
             "doc": "The message that is displayed when the user drags a file above the button."
+          },
+          {
+            "name": "notify",
+            "type": "bool",
+            "default_value": "True",
+            "doc": "If set to False, the user won't be notified of upload finish."
           }
         ]
       }
@@ -1105,7 +1111,7 @@
           {
             "name": "on_action",
             "type": "str",
-            "doc": "The name of a function that is triggered when the user selects a row.<br/>All parameters of that function are optional:\n<ul>\n<li>state (<code>State^</code>): the state instance.</li>\n<li>var_name (str): the name of the tabular data variable.</li>\n<li>payload (dict): the details on this callback's invocation.<br/>This dictionary has the following keys:\n<ul>\n<li>action: the name of the action that triggered this callback.</li>\n<li>index (int): the row index.</li>\n<li>col (str): the column name.</li></ul></li></ul>.",
+            "doc": "The name of a function that is triggered when the user selects a row.<br/>All parameters of that function are optional:\n<ul>\n<li>state (<code>State^</code>): the state instance.</li>\n<li>var_name (str): the name of the tabular data variable.</li>\n<li>payload (dict): the details on this callback's invocation.<br/>This dictionary has the following keys:\n<ul>\n<li>action: the name of the action that triggered this callback.</li>\n<li>index (int): the row index.</li>\n<li>col (str): the column name.</li>\n<li>reason (str): the origin of the action: \"click\", or \"button\" if the cell contains a Markdown link syntax.</li>\n<li>value (str): the *link value* indicated in the cell when using a Markdown link syntax (that is, <i>reason</i> is set to \"button\").</li></ul></li></ul>.",
             "signature": [["state", "State"], ["var_name", "str"], ["payload", "dict"]]
           },
           {
@@ -1124,6 +1130,11 @@
             "name": "lov[<i>column_name</i>]",
             "type": "list[str]|str",
             "doc": "The list of values of the indicated column."
+          },
+          {
+            "name": "downloadable",
+            "type": "boolean",
+            "doc": "If True, a clickable icon is shown so the user can download the data as CSV."
           }
         ]
       }

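Note: the extended `on_action` documentation adds `reason` and `value` to the callback payload so a cell rendered with the Markdown link syntax can be told apart from a plain row click. A sketch of a callback reading those keys (everything except the documented payload keys is illustrative):

    from taipy.gui import State

    def on_table_action(state: State, var_name: str, payload: dict):
        if payload.get("reason") == "button":
            # the cell used the Markdown link syntax; `value` carries the link value
            print("button clicked in", payload["col"], "->", payload.get("value"))
        else:
            print("row", payload["index"], "clicked in column", payload["col"])
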
+ 2 - 0
taipy/gui_core/_GuiCoreLib.py

@@ -99,9 +99,11 @@ class _GuiCore(ElementLibrary):
                 "width": ElementProperty(PropertyType.string),
                 "height": ElementProperty(PropertyType.string),
                 "class_name": ElementProperty(PropertyType.dynamic_string),
+                "on_action": ElementProperty(PropertyType.function),
             },
             inner_properties={
                 "core_changed": ElementProperty(PropertyType.broadcast, _GuiCoreContext._CORE_CHANGED_NAME),
+                "on_select": ElementProperty(PropertyType.function, f"{{{__CTX_VAR_NAME}.on_dag_select}}"),
             },
         ),
         "data_node_selector": Element(

+ 21 - 2
taipy/gui_core/_context.py

@@ -124,7 +124,7 @@ class _GuiCoreContext(CoreEventConsumerBase):
                     if sequence and hasattr(sequence, "parent_ids") and sequence.parent_ids:  # type: ignore
                         self.gui._broadcast(
                             _GuiCoreContext._CORE_CHANGED_NAME,
-                            {"scenario": [x for x in sequence.parent_ids]},  # type: ignore
+                            {"scenario": list(sequence.parent_ids)},  # type: ignore
                         )
             except Exception as e:
                 _warn(f"Access to sequence {event.entity_id} failed", e)
@@ -650,7 +650,7 @@ class _GuiCoreContext(CoreEventConsumerBase):
             if isinstance(ent, Scenario):
                 tags = data.get(_GuiCoreContext.__PROP_SCENARIO_TAGS)
                 if isinstance(tags, (list, tuple)):
-                    ent.tags = {t for t in tags}
+                    ent.tags = set(tags)
             name = data.get(_GuiCoreContext.__PROP_ENTITY_NAME)
             if isinstance(name, str):
                 if hasattr(ent, _GuiCoreContext.__PROP_ENTITY_NAME):
@@ -939,3 +939,22 @@ class _GuiCoreContext(CoreEventConsumerBase):
             state.assign(_GuiCoreContext._DATANODE_VIZ_DATA_ID_VAR, data_id)
         elif chart_id := data.get("chart_id"):
             state.assign(_GuiCoreContext._DATANODE_VIZ_DATA_CHART_ID_VAR, chart_id)
+
+    def on_dag_select(self, state: State, id: str, payload: t.Dict[str, str]):
+        args = payload.get("args")
+        if args is None or not isinstance(args, list) or len(args) < 2:
+            return
+        on_action_function = self.gui._get_user_function(args[1]) if args[1] else None
+        if callable(on_action_function):
+            try:
+                entity = core_get(args[0]) if is_readable(args[0]) else f"unreadable({args[0]})"
+                self.gui._call_function_with_state(
+                    on_action_function,
+                    [entity],
+                )
+            except Exception as e:
+                if not self.gui._call_on_exception(args[1], e):
+                    _warn(f"dag.on_action(): Exception raised in '{args[1]}()' with '{args[0]}'", e)
+        elif args[1]:
+            _warn(f"dag.on_action(): Invalid function '{args[1]}()'.")
+

+ 6 - 0
taipy/gui_core/viselements.json

@@ -207,6 +207,12 @@
                         "type": "str",
                         "default_value": "\"100%\"",
                         "doc": "The maximum width, in CSS units, of the control."
+                    },
+                    {
+                        "name": "on_action",
+                        "type": "Callback",
+                        "doc": "The name of the function that is triggered when a a node is selected.<br/><br/>All the parameters of that function are optional:\n<ul>\n<li>state (<code>State^</code>): the state instance.</li>\n<li>entity (DataNode | Task): the entity (DataNode or Task) that was selected.</li>\n</ul>",
+                        "signature": [["state", "State"], ["entity", "Task | DataNode"]]
                     }
                 ]
             }

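Note: the new `on_action` property of `scenario_dag` passes the selected DAG node (a `DataNode` or a `Task`) straight to the user callback, as documented above. A minimal callback sketch:

    from taipy import DataNode, Task
    from taipy.gui import State, notify

    def on_dag_node_selected(state: State, entity):
        # `entity` is the DataNode or Task that was clicked in the scenario DAG
        kind = "data node" if isinstance(entity, DataNode) else "task"
        notify(state, "info", f"Selected {kind}: {entity.get_simple_label()}")
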
+ 2 - 0
taipy/rest/setup.py

@@ -31,6 +31,8 @@ def get_requirements():
     # get requirements from the different setups in tools/packages (removing taipy packages)
     reqs = set()
     for pkg in (root_folder / "tools" / "packages").iterdir():
+        if "taipy-rest" not in str(pkg):
+            continue
         requirements_file = pkg / "setup.requirements.txt"
         if requirements_file.exists():
             reqs.update(requirements_file.read_text("UTF-8").splitlines())

+ 1 - 1
taipy/rest/version.json

@@ -1 +1 @@
-{"major": 3, "minor": 1, "patch": 0, "ext": "dev2"}
+{"major": 3, "minor": 2, "patch": 0, "ext": "dev0"}

+ 1 - 1
taipy/templates/version.json

@@ -1 +1 @@
-{"major": 3, "minor": 1, "patch": 0, "ext": "dev2"}
+{"major": 3, "minor": 2, "patch": 0, "ext": "dev0"}

+ 1 - 1
taipy/version.json

@@ -1 +1 @@
-{"major": 3, "minor": 1, "patch": 0, "ext": "dev2"}
+{"major": 3, "minor": 2, "patch": 0, "ext": "dev0"}

+ 2 - 0
tests/core/_entity/test_migrate_cli.py

@@ -26,6 +26,8 @@ from taipy.core._entity._migrate_cli import _MigrateCLI
 def clean_data_folder():
     if os.path.exists("tests/core/_entity/.data"):
         shutil.rmtree("tests/core/_entity/.data")
+    if os.path.exists("tests/core/_entity/.taipy"):
+        shutil.rmtree("tests/core/_entity/.taipy")
     yield
 
 

+ 2 - 10
tests/core/_orchestrator/_dispatcher/mock_standalone_dispatcher.py

@@ -38,10 +38,10 @@ class MockStandaloneDispatcher(_StandaloneJobDispatcher):
     def __init__(self, orchestrator: _AbstractOrchestrator):
         super(_StandaloneJobDispatcher, self).__init__(orchestrator)
         self._executor: Executor = MockProcessPoolExecutor()
+        self._nb_available_workers = 1
+
         self.dispatch_calls: List = []
         self.release_worker_calls: List = []
-        self.set_dispatch_processes_calls: List = []
-        self.pop_dispatch_processes_calls: List = []
         self.update_job_status_from_future_calls: List = []
 
     def mock_exception_for_job(self, task_id, e: Exception):
@@ -51,14 +51,6 @@ class MockStandaloneDispatcher(_StandaloneJobDispatcher):
         self.dispatch_calls.append(job)
         super()._dispatch(job)
 
-    def _set_dispatched_processes(self, job_id, future):
-        self.set_dispatch_processes_calls.append((job_id, future))
-        super()._set_dispatched_processes(job_id, future)
-
-    def _pop_dispatched_process(self, job_id, default=None):
-        self.pop_dispatch_processes_calls.append(job_id)
-        return super()._pop_dispatched_process(job_id, default)
-
     def _release_worker(self, _):
         self.release_worker_calls.append(None)
         super()._release_worker(_)

+ 6 - 0
tests/core/_orchestrator/_dispatcher/test_development_job_dispatcher.py

@@ -13,6 +13,7 @@ import traceback
 from unittest.mock import patch
 
 from taipy.core import JobId
+from taipy.core._orchestrator._dispatcher import _DevelopmentJobDispatcher
 from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
 from taipy.core.job.job import Job
 from taipy.core.task._task_manager_factory import _TaskManagerFactory
@@ -61,3 +62,8 @@ def test_dispatch_executes_the_function_with_exceptions():
     assert job.stacktrace[1] == "".join(traceback.format_exception(type(e_2), value=e_2, tb=e_2.__traceback__))
     assert job.stacktrace[0] == "".join(traceback.format_exception(type(e_1), value=e_1, tb=e_1.__traceback__))
     assert job.is_failed()
+
+
+def test_can_execute():
+    dispatcher = _DevelopmentJobDispatcher(_OrchestratorFactory._orchestrator)
+    assert dispatcher._can_execute()

+ 0 - 12
tests/core/_orchestrator/_dispatcher/test_dispatcher__execute_job.py

@@ -33,18 +33,6 @@ def create_scenario():
     return taipy.create_scenario(sc_conf)
 
 
-def test_can_execute():
-    dispatcher = _JobDispatcher(_OrchestratorFactory._orchestrator)
-    assert dispatcher._nb_available_workers == 1
-    assert dispatcher._can_execute()
-    dispatcher._nb_available_workers = 0
-    assert not dispatcher._can_execute()
-    dispatcher._nb_available_workers = -1
-    assert not dispatcher._can_execute()
-    dispatcher._nb_available_workers = 1
-    assert dispatcher._can_execute()
-
-
 def test_execute_job():
     scenario = create_scenario()
     scenario.t1.skippable = True  # make the job skippable

+ 12 - 9
tests/core/_orchestrator/_dispatcher/test_standalone_job_dispatcher.py

@@ -70,11 +70,6 @@ def test_dispatch_job():
     assert submit_first_call[1] == ()
     assert submit_first_call[2]["config_as_string"] == _TomlSerializer()._serialize(Config._applied_config)
 
-    # test that the proc of the job is added to the list of dispatched jobs
-    assert len(dispatcher.set_dispatch_processes_calls) == 1
-    assert dispatcher.set_dispatch_processes_calls[0][0] == job.id
-    assert dispatcher.set_dispatch_processes_calls[0][1] == dispatcher._executor.f[0]
-
     # test that the worker is released after the job is done
     assert len(dispatcher.release_worker_calls) == 1
 
@@ -84,6 +79,18 @@ def test_dispatch_job():
     assert dispatcher.update_job_status_from_future_calls[0][1] == dispatcher._executor.f[0]
 
 
+def test_can_execute():
+    dispatcher = _StandaloneJobDispatcher(_OrchestratorFactory._orchestrator)
+    assert dispatcher._nb_available_workers == 1
+    assert dispatcher._can_execute()
+    dispatcher._nb_available_workers = 0
+    assert not dispatcher._can_execute()
+    dispatcher._nb_available_workers = -1
+    assert not dispatcher._can_execute()
+    dispatcher._nb_available_workers = 1
+    assert dispatcher._can_execute()
+
+
 def test_release_worker():
     dispatcher = _StandaloneJobDispatcher(_OrchestratorFactory._orchestrator)
 
@@ -101,11 +108,7 @@ def test_update_job_status_from_future():
     dispatcher = _StandaloneJobDispatcher(orchestrator)
     ft = Future()
     ft.set_result(None)
-    dispatcher._set_dispatched_processes(job.id, ft)  # the job is dispatched to a process
-
     dispatcher._update_job_status_from_future(job, ft)
-
-    assert len(dispatcher._dispatched_processes) == 0  # the job process is not stored anymore
     assert job.is_completed()
 
 

+ 0 - 3
tests/core/_orchestrator/_dispatcher/test_task_function_wrapper.py

@@ -17,7 +17,6 @@ from taipy.config._serializer._toml_serializer import _TomlSerializer
 from taipy.config.common.scope import Scope
 from taipy.config.exceptions import ConfigurationUpdateBlocked
 from taipy.core._orchestrator._dispatcher._task_function_wrapper import _TaskFunctionWrapper
-from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
 from taipy.core.data._data_manager import _DataManager
 from taipy.core.task.task import Task
 
@@ -84,9 +83,7 @@ def test_execute_task_that_returns_single_iterable_output():
     _TaskFunctionWrapper("job_id_list", task_with_list).execute()
 
     assert task_with_tuple.output[f"{task_with_tuple.config_id}_output0"].read() == (42, 21)
-    assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 0
     assert task_with_list.output[f"{task_with_list.config_id}_output0"].read() == [42, 21]
-    assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 0
 
 
 def test_data_node_not_written_due_to_wrong_result_nb():

+ 28 - 40
tests/core/_orchestrator/test_orchestrator.py

@@ -14,11 +14,13 @@ import random
 import string
 from functools import partial
 from time import sleep
+from typing import cast
 
 import pytest
 
 from taipy.config import Config
 from taipy.config.common.scope import Scope
+from taipy.core._orchestrator._dispatcher import _StandaloneJobDispatcher
 from taipy.core._orchestrator._orchestrator import _Orchestrator
 from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
 from taipy.core.config.job_config import JobConfig
@@ -57,7 +59,7 @@ def test_submit_task_multithreading_multiple_task():
     task_1 = _create_task(partial(lock_multiply, lock_1))
     task_2 = _create_task(partial(lock_multiply, lock_2))
 
-    _OrchestratorFactory._build_dispatcher(force_restart=True)
+    dispatcher = cast(_StandaloneJobDispatcher, _OrchestratorFactory._build_dispatcher(force_restart=True))
 
     with lock_1:
         with lock_2:
@@ -70,7 +72,7 @@ def test_submit_task_multithreading_multiple_task():
             assert task_2.output[f"{task_2.config_id}_output0"].read() == 0
             assert_true_after_time(job_1.is_running)
             assert_true_after_time(job_2.is_running)
-            assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 2
+            assert dispatcher._nb_available_workers == 0
             assert_submission_status(submission_1, SubmissionStatus.RUNNING)
             assert_submission_status(submission_2, SubmissionStatus.RUNNING)
 
@@ -78,14 +80,14 @@ def test_submit_task_multithreading_multiple_task():
         assert_true_after_time(job_1.is_running)
         assert task_2.output[f"{task_2.config_id}_output0"].read() == 42
         assert task_1.output[f"{task_1.config_id}_output0"].read() == 0
-        assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 1
+        assert dispatcher._nb_available_workers == 1
         assert_submission_status(submission_1, SubmissionStatus.RUNNING)
         assert_submission_status(submission_2, SubmissionStatus.COMPLETED)
 
     assert_true_after_time(job_1.is_completed)
     assert job_2.is_completed()
     assert task_1.output[f"{task_1.config_id}_output0"].read() == 42
-    assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 0
+    assert dispatcher._nb_available_workers == 2
     assert_submission_status(submission_1, SubmissionStatus.COMPLETED)
     assert submission_2.submission_status == SubmissionStatus.COMPLETED
 
@@ -93,17 +95,13 @@ def test_submit_task_multithreading_multiple_task():
 @pytest.mark.orchestrator_dispatcher
 def test_submit_submittable_multithreading_multiple_task():
     Config.configure_job_executions(mode=JobConfig._STANDALONE_MODE, max_nb_of_workers=2)
-
     m = multiprocessing.Manager()
     lock_1 = m.Lock()
     lock_2 = m.Lock()
-
     task_1 = _create_task(partial(lock_multiply, lock_1))
     task_2 = _create_task(partial(lock_multiply, lock_2))
-
-    scenario = Scenario("scenario_config", [task_1, task_2], {})
-
-    _OrchestratorFactory._build_dispatcher(force_restart=True)
+    scenario = Scenario("scenario_config", {task_1, task_2}, {})
+    dispatcher = cast(_StandaloneJobDispatcher, _OrchestratorFactory._build_dispatcher(force_restart=True))
 
     with lock_1:
         with lock_2:
@@ -116,20 +114,20 @@ def test_submit_submittable_multithreading_multiple_task():
             assert task_2.output[f"{task_2.config_id}_output0"].read() == 0
             assert_true_after_time(job_1.is_running)
             assert_true_after_time(job_2.is_running)
-            assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 2  # Two processes used
+            assert dispatcher._nb_available_workers == 0  # Two processes used
             assert_submission_status(submission, SubmissionStatus.RUNNING)
         assert_true_after_time(job_2.is_completed)
         assert_true_after_time(job_1.is_running)
         assert task_2.output[f"{task_2.config_id}_output0"].read() == 42
         assert task_1.output[f"{task_1.config_id}_output0"].read() == 0
-        assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 1  # job 1 is completed: One process used
         assert_submission_status(submission, SubmissionStatus.RUNNING)
+        assert dispatcher._nb_available_workers == 1  # job 1 is completed: One process used
 
     assert_true_after_time(job_1.is_completed)
     assert job_2.is_completed()
     assert task_1.output[f"{task_1.config_id}_output0"].read() == 42
-    assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 0  # No more process used.
     assert_submission_status(submission, SubmissionStatus.COMPLETED)
+    assert dispatcher._nb_available_workers == 2  # No more process used.
 
 
 @pytest.mark.orchestrator_dispatcher
@@ -142,13 +140,13 @@ def test_submit_task_multithreading_multiple_task_in_sync_way_to_check_job_statu
     task_0 = _create_task(partial(lock_multiply, lock_0))
     task_1 = _create_task(partial(lock_multiply, lock_1))
     task_2 = _create_task(partial(lock_multiply, lock_2))
-    _OrchestratorFactory._build_dispatcher(force_restart=True)
+    dispatcher = cast(_StandaloneJobDispatcher, _OrchestratorFactory._build_dispatcher(force_restart=True))
 
     with lock_0:
         submission_0 = _Orchestrator.submit_task(task_0)
         job_0 = submission_0._jobs[0]
         assert_true_after_time(job_0.is_running)
-        assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 1
+        assert dispatcher._nb_available_workers == 1
         assert_submission_status(submission_0, SubmissionStatus.RUNNING)
         with lock_1:
             with lock_2:
@@ -164,12 +162,12 @@ def test_submit_task_multithreading_multiple_task_in_sync_way_to_check_job_statu
                 assert_submission_status(submission_0, SubmissionStatus.RUNNING)
                 assert_submission_status(submission_1, SubmissionStatus.PENDING)
                 assert_submission_status(submission_2, SubmissionStatus.RUNNING)
-                assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 2
+                assert dispatcher._nb_available_workers == 0
 
             assert_true_after_time(job_0.is_running)
             assert_true_after_time(job_1.is_running)
             assert_true_after_time(job_2.is_completed)
-            assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 2
+            assert dispatcher._nb_available_workers == 0
             assert task_2.output[f"{task_2.config_id}_output0"].read() == 42
             assert task_1.output[f"{task_1.config_id}_output0"].read() == 0
             assert_submission_status(submission_0, SubmissionStatus.RUNNING)
@@ -179,7 +177,7 @@ def test_submit_task_multithreading_multiple_task_in_sync_way_to_check_job_statu
         assert_true_after_time(job_0.is_running)
         assert_true_after_time(job_1.is_completed)
         assert job_2.is_completed()
-        assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 1
+        assert dispatcher._nb_available_workers == 1
         assert task_1.output[f"{task_1.config_id}_output0"].read() == 42
         assert task_0.output[f"{task_0.config_id}_output0"].read() == 0
         assert_submission_status(submission_0, SubmissionStatus.RUNNING)
@@ -189,7 +187,7 @@ def test_submit_task_multithreading_multiple_task_in_sync_way_to_check_job_statu
     assert_true_after_time(job_0.is_completed)
     assert job_1.is_completed()
     assert job_2.is_completed()
-    assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 0
+    assert dispatcher._nb_available_workers == 2
     assert task_0.output[f"{task_0.config_id}_output0"].read() == 42
     assert _SubmissionManager._get(job_0.submit_id).submission_status == SubmissionStatus.COMPLETED
     assert _SubmissionManager._get(job_1.submit_id).submission_status == SubmissionStatus.COMPLETED
@@ -198,17 +196,15 @@ def test_submit_task_multithreading_multiple_task_in_sync_way_to_check_job_statu
 
 @pytest.mark.orchestrator_dispatcher
 def test_blocked_task():
-    Config.configure_job_executions(mode=JobConfig._STANDALONE_MODE, max_nb_of_workers=2)
-
+    Config.configure_job_executions(mode=JobConfig._STANDALONE_MODE, max_nb_of_workers=4)
     m = multiprocessing.Manager()
     lock_1 = m.Lock()
     lock_2 = m.Lock()
-
     foo_cfg = Config.configure_data_node("foo", default_data=1)
     bar_cfg = Config.configure_data_node("bar")
     baz_cfg = Config.configure_data_node("baz")
 
-    _OrchestratorFactory._build_dispatcher(force_restart=True)
+    dispatcher = cast(_StandaloneJobDispatcher, _OrchestratorFactory._build_dispatcher(force_restart=True))
 
     dns = _DataManager._bulk_get_or_create([foo_cfg, bar_cfg, baz_cfg])
     foo = dns[foo_cfg]
@@ -216,16 +212,14 @@ def test_blocked_task():
     baz = dns[baz_cfg]
     task_1 = Task("by_2", {}, partial(lock_multiply, lock_1, 2), [foo], [bar])
     task_2 = Task("by_3", {}, partial(lock_multiply, lock_2, 3), [bar], [baz])
-
     assert task_1.foo.is_ready_for_reading  # foo is ready
     assert not task_1.bar.is_ready_for_reading  # But bar is not ready
     assert not task_2.baz.is_ready_for_reading  # neither does baz
-
     assert len(_Orchestrator.blocked_jobs) == 0
     submission_2 = _Orchestrator.submit_task(task_2)
     job_2 = submission_2._jobs[0]  # job 2 is submitted
     assert job_2.is_blocked()  # since bar is not is_valid the job 2 is blocked
-    assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 0  # No process used
+    assert dispatcher._nb_available_workers == 4  # No process used
     assert _SubmissionManager._get(job_2.submit_id).submission_status == SubmissionStatus.BLOCKED
     assert len(_Orchestrator.blocked_jobs) == 1  # One job (job 2) is blocked
     with lock_2:
@@ -233,7 +227,7 @@ def test_blocked_task():
             submission_1 = _Orchestrator.submit_task(task_1)
             job_1 = submission_1._jobs[0]  # job 1 is submitted and locked
             assert_true_after_time(job_1.is_running)  # so it is still running
-            assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 1  # One process used for job 1
+            assert dispatcher._nb_available_workers == 3  # One process used for job 1
             assert not _DataManager._get(task_1.bar.id).is_ready_for_reading  # And bar still not ready
             assert job_2.is_blocked  # the job_2 remains blocked
             assert_submission_status(submission_1, SubmissionStatus.RUNNING)
@@ -242,14 +236,14 @@ def test_blocked_task():
         assert _DataManager._get(task_1.bar.id).is_ready_for_reading  # bar becomes ready
         assert _DataManager._get(task_1.bar.id).read() == 2  # the data is computed and written
         assert_true_after_time(job_2.is_running)  # And job 2 can start running
-        assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 1  # One process used for job 2
+        assert dispatcher._nb_available_workers == 3  # One process used for job 2
         assert len(_Orchestrator.blocked_jobs) == 0
         assert_submission_status(submission_1, SubmissionStatus.COMPLETED)
         assert_submission_status(submission_2, SubmissionStatus.RUNNING)
     assert_true_after_time(job_2.is_completed)  # job 2 unlocked so it can complete
     assert _DataManager._get(task_2.baz.id).is_ready_for_reading  # baz becomes ready
     assert _DataManager._get(task_2.baz.id).read() == 6  # the data is computed and written
-    assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 0  # No more process used.
+    assert dispatcher._nb_available_workers == 4  # No more processes in use.
     assert submission_1.submission_status == SubmissionStatus.COMPLETED
     assert_submission_status(submission_2, SubmissionStatus.COMPLETED)
 
@@ -257,29 +251,23 @@ def test_blocked_task():
 @pytest.mark.orchestrator_dispatcher
 def test_blocked_submittable():
     Config.configure_job_executions(mode=JobConfig._STANDALONE_MODE, max_nb_of_workers=2)
-
     m = multiprocessing.Manager()
     lock_1 = m.Lock()
     lock_2 = m.Lock()
-
     foo_cfg = Config.configure_data_node("foo", default_data=1)
     bar_cfg = Config.configure_data_node("bar")
     baz_cfg = Config.configure_data_node("baz")
-
-    _OrchestratorFactory._build_dispatcher(force_restart=True)
-
+    dispatcher = cast(_StandaloneJobDispatcher, _OrchestratorFactory._build_dispatcher(force_restart=True))
     dns = _DataManager._bulk_get_or_create([foo_cfg, bar_cfg, baz_cfg])
     foo = dns[foo_cfg]
     bar = dns[bar_cfg]
     baz = dns[baz_cfg]
     task_1 = Task("by_2", {}, partial(lock_multiply, lock_1, 2), [foo], [bar])
     task_2 = Task("by_3", {}, partial(lock_multiply, lock_2, 3), [bar], [baz])
-    scenario = Scenario("scenario_config", [task_1, task_2], {})
-
+    scenario = Scenario("scenario_config", {task_1, task_2}, {})
     assert task_1.foo.is_ready_for_reading  # foo is ready
     assert not task_1.bar.is_ready_for_reading  # But bar is not ready
     assert not task_2.baz.is_ready_for_reading  # neither does baz
-
     assert len(_Orchestrator.blocked_jobs) == 0
     with lock_2:
         with lock_1:
@@ -287,23 +275,23 @@ def test_blocked_submittable():
             tasks_jobs = {job._task.id: job for job in submission._jobs}
             job_1, job_2 = tasks_jobs[task_1.id], tasks_jobs[task_2.id]
             assert_true_after_time(job_1.is_running)  # job 1 is submitted and locked so it is still running
-            assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 1
             assert not _DataManager._get(task_1.bar.id).is_ready_for_reading  # And bar still not ready
             assert job_2.is_blocked  # the job_2 remains blocked
             assert_submission_status(submission, SubmissionStatus.RUNNING)
+            assert dispatcher._nb_available_workers == 1
         assert_true_after_time(job_1.is_completed)  # job1 unlocked and can complete
         assert _DataManager._get(task_1.bar.id).is_ready_for_reading  # bar becomes ready
         assert _DataManager._get(task_1.bar.id).read() == 2  # the data is computed and written
         assert_true_after_time(job_2.is_running)  # And job 2 can start running
-        assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 1 # Still one process
         # currently used since the previous process is not used anymore
         assert len(_Orchestrator.blocked_jobs) == 0
         assert_submission_status(submission, SubmissionStatus.RUNNING)
+        assert dispatcher._nb_available_workers == 1  # Still one worker in use for job 2
     assert_true_after_time(job_2.is_completed)  # job 2 unlocked so it can complete
     assert _DataManager._get(task_2.baz.id).is_ready_for_reading  # baz becomes ready
     assert _DataManager._get(task_2.baz.id).read() == 6  # the data is computed and written
-    assert len(_OrchestratorFactory._dispatcher._dispatched_processes) == 0  # No more process used.
     assert_submission_status(submission, SubmissionStatus.COMPLETED)
+    assert dispatcher._nb_available_workers == 2  # No more processes in use.
 
 
 # ################################  UTIL METHODS    ##################################
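
The rewritten assertions track the dispatcher's free-worker counter instead of its list of dispatched processes. A minimal sketch of the invariant these tests rely on, assuming the standalone dispatcher starts with max_nb_of_workers free workers and each running job holds exactly one worker until it finishes (the helper below is illustrative only, not part of the change):

    def expected_available_workers(max_nb_of_workers: int, nb_running_jobs: int) -> int:
        # One worker is consumed per job currently being executed.
        return max_nb_of_workers - nb_running_jobs

    assert expected_available_workers(4, 1) == 3  # one job running with max_nb_of_workers=4
    assert expected_available_workers(2, 0) == 2  # every worker released once the jobs complete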

+ 4 - 3
tests/core/_orchestrator/test_orchestrator__cancel_jobs.py

@@ -8,10 +8,12 @@
 # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
+from typing import cast
 
 from taipy import Job, JobId, Status
 from taipy.config import Config
 from taipy.core import taipy
+from taipy.core._orchestrator._orchestrator import _Orchestrator
 from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
 from taipy.core.job._job_manager_factory import _JobManagerFactory
 from taipy.core.task._task_manager_factory import _TaskManagerFactory
@@ -89,11 +91,10 @@ def test_cancel_job_with_subsequent_jobs_and_parallel_jobs():
     job3 = orchestrator._lock_dn_output_and_create_job(scenario.t3, "s_id", "e_id")
     job2bis = orchestrator._lock_dn_output_and_create_job(scenario.t2bis, "s_id", "e_id")
     job1.completed()
-
-    job2.running()
+    job2.pending()
     job3.blocked()
     job2bis.pending()
-    orchestrator.blocked_jobs = [job3]
+    cast(_Orchestrator, orchestrator).blocked_jobs = [job3]
 
     orchestrator.cancel_job(job2)
 

+ 3 - 6
tests/core/config/test_core_version.py

@@ -9,7 +9,7 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
-from unittest.mock import patch
+from unittest import mock
 
 import pytest
 
@@ -24,18 +24,15 @@ _MOCK_CORE_VERSION = "3.1.1"
 
 @pytest.fixture(scope="function", autouse=True)
 def mock_core_version():
-    with patch("taipy.core.config.core_section._read_version") as mock_read_version:
+    with mock.patch("taipy.core.config.core_section._read_version") as mock_read_version:
         mock_read_version.return_value = _MOCK_CORE_VERSION
         CoreSection._CURRENT_CORE_VERSION = _MOCK_CORE_VERSION
         Config.unique_sections[CoreSection.name] = CoreSection.default_config()
         Config._default_config._unique_sections[CoreSection.name] = CoreSection.default_config()
+        Config._python_config._unique_sections[CoreSection.name] = CoreSection.default_config()
 
         yield
 
-
-@pytest.fixture(scope="session", autouse=True)
-def reset_core_version():
-    yield
     CoreSection._CURRENT_CORE_VERSION = _read_version()
 
 

+ 1 - 1
tests/core/data/test_filter_data_node.py

@@ -244,7 +244,7 @@ def test_filter_by_get_item(default_data_frame):
     filtered_custom_dn = custom_dn["a"]
     assert isinstance(filtered_custom_dn, List)
     assert len(filtered_custom_dn) == 10
-    assert filtered_custom_dn == [i for i in range(10)]
+    assert filtered_custom_dn == list(range(10))
 
     filtered_custom_dn = custom_dn[0:5]
     assert isinstance(filtered_custom_dn, List)

+ 139 - 0
tests/core/data/test_filter_parquet_data_node.py

@@ -0,0 +1,139 @@
+# Copyright 2021-2024 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+import pathlib
+from importlib import util
+
+import numpy as np
+import pandas as pd
+import pytest
+from pandas.testing import assert_frame_equal
+
+from taipy.config.common.scope import Scope
+from taipy.core.data.operator import JoinOperator, Operator
+from taipy.core.data.parquet import ParquetDataNode
+
+
+@pytest.fixture(scope="function", autouse=True)
+def cleanup():
+    yield
+    path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/temp.parquet")
+    if os.path.isfile(path):
+        os.remove(path)
+
+
+class MyCustomObject:
+    def __init__(self, id, integer, text):
+        self.id = id
+        self.integer = integer
+        self.text = text
+
+
+class MyOtherCustomObject:
+    def __init__(self, id, sentence):
+        self.id = id
+        self.sentence = sentence
+
+
+def create_custom_class(**kwargs):
+    return MyOtherCustomObject(id=kwargs["id"], sentence=kwargs["text"])
+
+
+class TestFilterParquetDataNode:
+    __engine = ["pyarrow"]
+    if util.find_spec("fastparquet"):
+        __engine.append("fastparquet")
+
+    def test_filter_pandas_exposed_type(self, parquet_file_path):
+        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": parquet_file_path, "exposed_type": "pandas"})
+        dn.write(
+            [
+                {"foo": 1, "bar": 1},
+                {"foo": 1, "bar": 2},
+                {"foo": 1},
+                {"foo": 2, "bar": 2},
+                {"bar": 2},
+            ]
+        )
+
+        # Test datanode indexing and slicing
+        assert dn["foo"].equals(pd.Series([1, 1, 1, 2, None]))
+        assert dn["bar"].equals(pd.Series([1, 2, None, 2, 2]))
+        assert dn[:2].equals(pd.DataFrame([{"foo": 1.0, "bar": 1.0}, {"foo": 1.0, "bar": 2.0}]))
+
+        # Test filter data
+        filtered_by_filter_method = dn.filter(("foo", 1, Operator.EQUAL))
+        filtered_by_indexing = dn[dn["foo"] == 1]
+        expected_data = pd.DataFrame([{"foo": 1.0, "bar": 1.0}, {"foo": 1.0, "bar": 2.0}, {"foo": 1.0}])
+        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
+        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
+
+        filtered_by_filter_method = dn.filter(("foo", 1, Operator.NOT_EQUAL))
+        filtered_by_indexing = dn[dn["foo"] != 1]
+        expected_data = pd.DataFrame([{"foo": 2.0, "bar": 2.0}, {"bar": 2.0}])
+        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
+        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
+
+        filtered_by_filter_method = dn.filter(("bar", 2, Operator.EQUAL))
+        filtered_by_indexing = dn[dn["bar"] == 2]
+        expected_data = pd.DataFrame([{"foo": 1.0, "bar": 2.0}, {"foo": 2.0, "bar": 2.0}, {"bar": 2.0}])
+        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
+        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
+
+        filtered_by_filter_method = dn.filter([("bar", 1, Operator.EQUAL), ("bar", 2, Operator.EQUAL)], JoinOperator.OR)
+        filtered_by_indexing = dn[(dn["bar"] == 1) | (dn["bar"] == 2)]
+        expected_data = pd.DataFrame(
+            [
+                {"foo": 1.0, "bar": 1.0},
+                {"foo": 1.0, "bar": 2.0},
+                {"foo": 2.0, "bar": 2.0},
+                {"bar": 2.0},
+            ]
+        )
+        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
+        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
+
+    def test_filter_numpy_exposed_type(self, parquet_file_path):
+        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": parquet_file_path, "exposed_type": "numpy"})
+        dn.write(
+            [
+                [1, 1],
+                [1, 2],
+                [1, 3],
+                [2, 1],
+                [2, 2],
+                [2, 3],
+            ]
+        )
+
+        # Test datanode indexing and slicing
+        assert np.array_equal(dn[0], np.array([1, 1]))
+        assert np.array_equal(dn[1], np.array([1, 2]))
+        assert np.array_equal(dn[:3], np.array([[1, 1], [1, 2], [1, 3]]))
+        assert np.array_equal(dn[:, 0], np.array([1, 1, 1, 2, 2, 2]))
+        assert np.array_equal(dn[1:4, :1], np.array([[1], [1], [2]]))
+
+        # Test filter data
+        assert np.array_equal(dn.filter((0, 1, Operator.EQUAL)), np.array([[1, 1], [1, 2], [1, 3]]))
+        assert np.array_equal(dn[dn[:, 0] == 1], np.array([[1, 1], [1, 2], [1, 3]]))
+
+        assert np.array_equal(dn.filter((0, 1, Operator.NOT_EQUAL)), np.array([[2, 1], [2, 2], [2, 3]]))
+        assert np.array_equal(dn[dn[:, 0] != 1], np.array([[2, 1], [2, 2], [2, 3]]))
+
+        assert np.array_equal(dn.filter((1, 2, Operator.EQUAL)), np.array([[1, 2], [2, 2]]))
+        assert np.array_equal(dn[dn[:, 1] == 2], np.array([[1, 2], [2, 2]]))
+
+        assert np.array_equal(
+            dn.filter([(1, 1, Operator.EQUAL), (1, 2, Operator.EQUAL)], JoinOperator.OR),
+            np.array([[1, 1], [1, 2], [2, 1], [2, 2]]),
+        )
+        assert np.array_equal(dn[(dn[:, 1] == 1) | (dn[:, 1] == 2)], np.array([[1, 1], [1, 2], [2, 1], [2, 2]]))
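
With a numpy exposed type the filter tuples use positional column indices, so the assertions above reduce to plain boolean masking on the underlying array. A self-contained illustration of that equivalence, using only numpy:

    import numpy as np

    data = np.array([[1, 1], [1, 2], [2, 1]])
    mask = data[:, 0] == 1  # boolean mask over column 0
    assert np.array_equal(data[mask], np.array([[1, 1], [1, 2]]))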

+ 207 - 0
tests/core/data/test_filter_sql_table_data_node.py

@@ -0,0 +1,207 @@
+# Copyright 2021-2024 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from importlib import util
+from unittest.mock import patch
+
+import numpy as np
+import pandas as pd
+from pandas.testing import assert_frame_equal
+
+from taipy.config.common.scope import Scope
+from taipy.core.data.operator import JoinOperator, Operator
+from taipy.core.data.sql_table import SQLTableDataNode
+
+
+class MyCustomObject:
+    def __init__(self, foo=None, bar=None, *args, **kwargs):
+        self.foo = foo
+        self.bar = bar
+        self.args = args
+        self.kwargs = kwargs
+
+
+class TestFilterSQLTableDataNode:
+    __pandas_properties = [
+        {
+            "db_name": "taipy",
+            "db_engine": "sqlite",
+            "table_name": "example",
+            "db_extra_args": {
+                "TrustServerCertificate": "yes",
+                "other": "value",
+            },
+        },
+    ]
+
+    if util.find_spec("pyodbc"):
+        __pandas_properties.append(
+            {
+                "db_username": "sa",
+                "db_password": "Passw0rd",
+                "db_name": "taipy",
+                "db_engine": "mssql",
+                "table_name": "example",
+                "db_extra_args": {
+                    "TrustServerCertificate": "yes",
+                },
+            },
+        )
+
+    if util.find_spec("pymysql"):
+        __pandas_properties.append(
+            {
+                "db_username": "sa",
+                "db_password": "Passw0rd",
+                "db_name": "taipy",
+                "db_engine": "mysql",
+                "table_name": "example",
+                "db_extra_args": {
+                    "TrustServerCertificate": "yes",
+                },
+            },
+        )
+
+    if util.find_spec("psycopg2"):
+        __pandas_properties.append(
+            {
+                "db_username": "sa",
+                "db_password": "Passw0rd",
+                "db_name": "taipy",
+                "db_engine": "postgresql",
+                "table_name": "example",
+                "db_extra_args": {
+                    "TrustServerCertificate": "yes",
+                },
+            },
+        )
+
+    def test_filter_pandas_exposed_type(self, tmp_sqlite_sqlite3_file_path):
+        folder_path, db_name, file_extension = tmp_sqlite_sqlite3_file_path
+        properties = {
+            "db_engine": "sqlite",
+            "table_name": "example",
+            "db_name": db_name,
+            "sqlite_folder_path": folder_path,
+            "sqlite_file_extension": file_extension,
+            "exposed_type": "pandas",
+        }
+        dn = SQLTableDataNode("foo", Scope.SCENARIO, properties=properties)
+        dn.write(
+            pd.DataFrame(
+                [
+                    {"foo": 1, "bar": 1},
+                    {"foo": 1, "bar": 2},
+                    {"foo": 1, "bar": 3},
+                    {"foo": 2, "bar": 1},
+                    {"foo": 2, "bar": 2},
+                    {"foo": 2, "bar": 3},
+                ]
+            )
+        )
+
+        # Test datanode indexing and slicing
+        assert dn["foo"].equals(pd.Series([1, 1, 1, 2, 2, 2]))
+        assert dn["bar"].equals(pd.Series([1, 2, 3, 1, 2, 3]))
+        assert dn[:2].equals(pd.DataFrame([{"foo": 1, "bar": 1}, {"foo": 1, "bar": 2}]))
+
+        # Test filter data
+        filtered_by_filter_method = dn.filter(("foo", 1, Operator.EQUAL))
+        filtered_by_indexing = dn[dn["foo"] == 1]
+        expected_data = pd.DataFrame([{"foo": 1, "bar": 1}, {"foo": 1, "bar": 2}, {"foo": 1, "bar": 3}])
+        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
+        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
+
+        filtered_by_filter_method = dn.filter(("foo", 1, Operator.NOT_EQUAL))
+        filtered_by_indexing = dn[dn["foo"] != 1]
+        expected_data = pd.DataFrame([{"foo": 2, "bar": 1}, {"foo": 2, "bar": 2}, {"foo": 2, "bar": 3}])
+        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
+        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
+
+        filtered_by_filter_method = dn.filter([("bar", 1, Operator.EQUAL), ("bar", 2, Operator.EQUAL)], JoinOperator.OR)
+        filtered_by_indexing = dn[(dn["bar"] == 1) | (dn["bar"] == 2)]
+        expected_data = pd.DataFrame(
+            [
+                {"foo": 1, "bar": 1},
+                {"foo": 1, "bar": 2},
+                {"foo": 2, "bar": 1},
+                {"foo": 2, "bar": 2},
+            ]
+        )
+        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
+        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
+
+    def test_filter_numpy_exposed_type(self, tmp_sqlite_sqlite3_file_path):
+        folder_path, db_name, file_extension = tmp_sqlite_sqlite3_file_path
+        properties = {
+            "db_engine": "sqlite",
+            "table_name": "example",
+            "db_name": db_name,
+            "sqlite_folder_path": folder_path,
+            "sqlite_file_extension": file_extension,
+            "exposed_type": "numpy",
+        }
+        dn = SQLTableDataNode("foo", Scope.SCENARIO, properties=properties)
+        dn.write(
+            pd.DataFrame(
+                [
+                    {"foo": 1, "bar": 1},
+                    {"foo": 1, "bar": 2},
+                    {"foo": 1, "bar": 3},
+                    {"foo": 2, "bar": 1},
+                    {"foo": 2, "bar": 2},
+                    {"foo": 2, "bar": 3},
+                ]
+            )
+        )
+
+        # Test datanode indexing and slicing
+        assert np.array_equal(dn[0], np.array([1, 1]))
+        assert np.array_equal(dn[1], np.array([1, 2]))
+        assert np.array_equal(dn[:3], np.array([[1, 1], [1, 2], [1, 3]]))
+        assert np.array_equal(dn[:, 0], np.array([1, 1, 1, 2, 2, 2]))
+        assert np.array_equal(dn[1:4, :1], np.array([[1], [1], [2]]))
+
+        # Test filter data
+        assert np.array_equal(dn.filter(("foo", 1, Operator.EQUAL)), np.array([[1, 1], [1, 2], [1, 3]]))
+        assert np.array_equal(dn[dn[:, 0] == 1], np.array([[1, 1], [1, 2], [1, 3]]))
+
+        assert np.array_equal(dn.filter(("foo", 1, Operator.NOT_EQUAL)), np.array([[2, 1], [2, 2], [2, 3]]))
+        assert np.array_equal(dn[dn[:, 0] != 1], np.array([[2, 1], [2, 2], [2, 3]]))
+
+        assert np.array_equal(dn.filter(("bar", 2, Operator.EQUAL)), np.array([[1, 2], [2, 2]]))
+        assert np.array_equal(dn[dn[:, 1] == 2], np.array([[1, 2], [2, 2]]))
+
+        assert np.array_equal(
+            dn.filter([("bar", 1, Operator.EQUAL), ("bar", 2, Operator.EQUAL)], JoinOperator.OR),
+            np.array([[1, 1], [1, 2], [2, 1], [2, 2]]),
+        )
+        assert np.array_equal(dn[(dn[:, 1] == 1) | (dn[:, 1] == 2)], np.array([[1, 1], [1, 2], [2, 1], [2, 2]]))
+
+    def test_filter_does_not_read_all_entities(self, tmp_sqlite_sqlite3_file_path):
+        folder_path, db_name, file_extension = tmp_sqlite_sqlite3_file_path
+        properties = {
+            "db_engine": "sqlite",
+            "table_name": "example",
+            "db_name": db_name,
+            "sqlite_folder_path": folder_path,
+            "sqlite_file_extension": file_extension,
+            "exposed_type": "numpy",
+        }
+        dn = SQLTableDataNode("foo", Scope.SCENARIO, properties=properties)
+
+        # SQLTableDataNode.filter() should not call the SQLTableDataNode._read() method
+        with patch.object(SQLTableDataNode, "_read") as read_mock:
+            dn.filter(("foo", 1, Operator.EQUAL))
+            dn.filter(("bar", 2, Operator.NOT_EQUAL))
+            dn.filter([("bar", 1, Operator.EQUAL), ("bar", 2, Operator.EQUAL)], JoinOperator.OR)
+
+            assert read_mock.call_count == 0
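
Here patch.object replaces SQLTableDataNode._read with a MagicMock, so the mock itself records every call and its call_count can be checked directly. A standalone sketch of the same pattern using only unittest.mock (the Repo class is a made-up stand-in, not a Taipy class):

    from unittest.mock import patch

    class Repo:
        def _read(self):
            return "full table scan"

        def filter(self, condition):
            # Filtering should build a query instead of reading every entity.
            return f"SELECT * FROM example WHERE {condition}"

    with patch.object(Repo, "_read") as read_mock:
        Repo().filter("foo = 1")
        assert read_mock.call_count == 0  # _read was never invoked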

+ 2 - 2
tests/core/data/test_generic_data_node.py

@@ -46,11 +46,11 @@ def read_fct_modify_data_node_name(data_node_id: DataNodeId, name: str):
 
 
 def reset_data():
-    TestGenericDataNode.data = [i for i in range(10)]
+    TestGenericDataNode.data = list(range(10))
 
 
 class TestGenericDataNode:
-    data = [i for i in range(10)]
+    data = list(range(10))
 
     def test_create(self):
         dn = GenericDataNode(

+ 0 - 327
tests/core/data/test_parquet_data_node.py

@@ -15,21 +15,17 @@ from datetime import datetime
 from importlib import util
 from time import sleep
 
-import numpy as np
 import pandas as pd
 import pytest
-from pandas.testing import assert_frame_equal
 
 from taipy.config.common.scope import Scope
 from taipy.config.config import Config
 from taipy.config.exceptions.exceptions import InvalidConfigurationId
 from taipy.core.data._data_manager import _DataManager
 from taipy.core.data.data_node_id import DataNodeId
-from taipy.core.data.operator import JoinOperator, Operator
 from taipy.core.data.parquet import ParquetDataNode
 from taipy.core.exceptions.exceptions import (
     InvalidExposedType,
-    NoData,
     UnknownCompressionAlgorithm,
     UnknownParquetEngine,
 )
@@ -143,81 +139,12 @@ class TestParquetDataNode:
         data_modin = parquet_data_node_as_modin.read()
         assert isinstance(data_modin, pd.DataFrame)
 
-    @pytest.mark.parametrize("engine", __engine)
-    def test_read_file(self, engine, parquet_file_path):
-        not_existing_parquet = ParquetDataNode(
-            "foo", Scope.SCENARIO, properties={"path": "nonexistent.parquet", "engine": engine}
-        )
-        with pytest.raises(NoData):
-            assert not_existing_parquet.read() is None
-            not_existing_parquet.read_or_raise()
-
-        df = pd.read_parquet(parquet_file_path)
-        # Create ParquetDataNode without exposed_type (Default is pandas.DataFrame)
-        parquet_data_node_as_pandas = ParquetDataNode(
-            "bar", Scope.SCENARIO, properties={"path": parquet_file_path, "engine": engine}
-        )
-        data_pandas = parquet_data_node_as_pandas.read()
-        assert isinstance(data_pandas, pd.DataFrame)
-        assert len(data_pandas) == 2
-        assert data_pandas.equals(df)
-        assert np.array_equal(data_pandas.to_numpy(), df.to_numpy())
-
-        # Create ParquetDataNode with numpy exposed_type
-        parquet_data_node_as_numpy = ParquetDataNode(
-            "bar", Scope.SCENARIO, properties={"path": parquet_file_path, "exposed_type": "numpy", "engine": engine}
-        )
-        data_numpy = parquet_data_node_as_numpy.read()
-        assert isinstance(data_numpy, np.ndarray)
-        assert len(data_numpy) == 2
-        assert np.array_equal(data_numpy, df.to_numpy())
-
-    @pytest.mark.parametrize("engine", __engine)
-    def test_read_folder(self, engine):
-        parquet_folder_path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/parquet_example")
-
-        df = pd.read_parquet(parquet_folder_path)
-        parquet_data_node_as_pandas = ParquetDataNode(
-            "bar", Scope.SCENARIO, properties={"path": parquet_folder_path, "engine": engine}
-        )
-        data_pandas = parquet_data_node_as_pandas.read()
-        assert isinstance(data_pandas, pd.DataFrame)
-        assert len(data_pandas) == 5
-        assert data_pandas.equals(df)
-        assert np.array_equal(data_pandas.to_numpy(), df.to_numpy())
-
     def test_set_path(self):
         dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": "foo.parquet"})
         assert dn.path == "foo.parquet"
         dn.path = "bar.parquet"
         assert dn.path == "bar.parquet"
 
-    @pytest.mark.parametrize("engine", __engine)
-    def test_read_write_after_modify_path(self, engine):
-        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
-        new_path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/temp.parquet")
-        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "engine": engine})
-        read_data = dn.read()
-        assert read_data is not None
-        dn.path = new_path
-        with pytest.raises(FileNotFoundError):
-            dn.read()
-        dn.write(read_data)
-        assert dn.read().equals(read_data)
-
-    def test_read_custom_exposed_type(self):
-        example_parquet_path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
-
-        dn = ParquetDataNode(
-            "foo", Scope.SCENARIO, properties={"path": example_parquet_path, "exposed_type": MyCustomObject}
-        )
-        assert all(isinstance(obj, MyCustomObject) for obj in dn.read())
-
-        dn = ParquetDataNode(
-            "foo", Scope.SCENARIO, properties={"path": example_parquet_path, "exposed_type": create_custom_class}
-        )
-        assert all(isinstance(obj, MyOtherCustomObject) for obj in dn.read())
-
     def test_raise_error_unknown_parquet_engine(self):
         path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
         with pytest.raises(UnknownParquetEngine):
@@ -233,23 +160,6 @@ class TestParquetDataNode:
         with pytest.raises(InvalidExposedType):
             ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "foo"})
 
-    def test_read_empty_data(self, tmpdir_factory):
-        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
-        empty_df = pd.DataFrame([])
-        empty_df.to_parquet(temp_file_path)
-
-        # Pandas
-        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": "pandas"})
-        assert dn.read().equals(empty_df)
-
-        # Numpy
-        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": "numpy"})
-        assert np.array_equal(dn.read(), empty_df.to_numpy())
-
-        # Custom
-        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": MyCustomObject})
-        assert dn.read() == []
-
     def test_get_system_file_modified_date_instead_of_last_edit_date(self, tmpdir_factory):
         temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
         pd.DataFrame([]).to_parquet(temp_file_path)
@@ -297,243 +207,6 @@ class TestParquetDataNode:
 
         os.unlink(temp_file_path)
 
-    @pytest.mark.skipif(not util.find_spec("fastparquet"), reason="Append parquet requires fastparquet to be installed")
-    @pytest.mark.parametrize(
-        "content",
-        [
-            ([{"a": 11, "b": 22, "c": 33}, {"a": 44, "b": 55, "c": 66}]),
-            (pd.DataFrame([{"a": 11, "b": 22, "c": 33}, {"a": 44, "b": 55, "c": 66}])),
-        ],
-    )
-    def test_append_pandas(self, parquet_file_path, default_data_frame, content):
-        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": parquet_file_path})
-        assert_frame_equal(dn.read(), default_data_frame)
-
-        dn.append(content)
-        assert_frame_equal(
-            dn.read(),
-            pd.concat([default_data_frame, pd.DataFrame(content, columns=["a", "b", "c"])]).reset_index(drop=True),
-        )
-
-    @pytest.mark.parametrize(
-        "data",
-        [
-            [{"a": 11, "b": 22, "c": 33}, {"a": 44, "b": 55, "c": 66}],
-            pd.DataFrame([{"a": 11, "b": 22, "c": 33}, {"a": 44, "b": 55, "c": 66}]),
-        ],
-    )
-    def test_write_to_disk(self, tmpdir_factory, data):
-        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
-        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path})
-        dn.write(data)
-
-        assert pathlib.Path(temp_file_path).exists()
-        assert isinstance(dn.read(), pd.DataFrame)
-
-    def test_filter_pandas_exposed_type(self, parquet_file_path):
-        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": parquet_file_path, "exposed_type": "pandas"})
-        dn.write(
-            [
-                {"foo": 1, "bar": 1},
-                {"foo": 1, "bar": 2},
-                {"foo": 1},
-                {"foo": 2, "bar": 2},
-                {"bar": 2},
-            ]
-        )
-
-        # Test datanode indexing and slicing
-        assert dn["foo"].equals(pd.Series([1, 1, 1, 2, None]))
-        assert dn["bar"].equals(pd.Series([1, 2, None, 2, 2]))
-        assert dn[:2].equals(pd.DataFrame([{"foo": 1.0, "bar": 1.0}, {"foo": 1.0, "bar": 2.0}]))
-
-        # Test filter data
-        filtered_by_filter_method = dn.filter(("foo", 1, Operator.EQUAL))
-        filtered_by_indexing = dn[dn["foo"] == 1]
-        expected_data = pd.DataFrame([{"foo": 1.0, "bar": 1.0}, {"foo": 1.0, "bar": 2.0}, {"foo": 1.0}])
-        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
-        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
-
-        filtered_by_filter_method = dn.filter(("foo", 1, Operator.NOT_EQUAL))
-        filtered_by_indexing = dn[dn["foo"] != 1]
-        expected_data = pd.DataFrame([{"foo": 2.0, "bar": 2.0}, {"bar": 2.0}])
-        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
-        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
-
-        filtered_by_filter_method = dn.filter(("bar", 2, Operator.EQUAL))
-        filtered_by_indexing = dn[dn["bar"] == 2]
-        expected_data = pd.DataFrame([{"foo": 1.0, "bar": 2.0}, {"foo": 2.0, "bar": 2.0}, {"bar": 2.0}])
-        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
-        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
-
-        filtered_by_filter_method = dn.filter([("bar", 1, Operator.EQUAL), ("bar", 2, Operator.EQUAL)], JoinOperator.OR)
-        filtered_by_indexing = dn[(dn["bar"] == 1) | (dn["bar"] == 2)]
-        expected_data = pd.DataFrame(
-            [
-                {"foo": 1.0, "bar": 1.0},
-                {"foo": 1.0, "bar": 2.0},
-                {"foo": 2.0, "bar": 2.0},
-                {"bar": 2.0},
-            ]
-        )
-        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
-        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
-
-    def test_filter_numpy_exposed_type(self, parquet_file_path):
-        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": parquet_file_path, "exposed_type": "numpy"})
-        dn.write(
-            [
-                [1, 1],
-                [1, 2],
-                [1, 3],
-                [2, 1],
-                [2, 2],
-                [2, 3],
-            ]
-        )
-
-        # Test datanode indexing and slicing
-        assert np.array_equal(dn[0], np.array([1, 1]))
-        assert np.array_equal(dn[1], np.array([1, 2]))
-        assert np.array_equal(dn[:3], np.array([[1, 1], [1, 2], [1, 3]]))
-        assert np.array_equal(dn[:, 0], np.array([1, 1, 1, 2, 2, 2]))
-        assert np.array_equal(dn[1:4, :1], np.array([[1], [1], [2]]))
-
-        # Test filter data
-        assert np.array_equal(dn.filter((0, 1, Operator.EQUAL)), np.array([[1, 1], [1, 2], [1, 3]]))
-        assert np.array_equal(dn[dn[:, 0] == 1], np.array([[1, 1], [1, 2], [1, 3]]))
-
-        assert np.array_equal(dn.filter((0, 1, Operator.NOT_EQUAL)), np.array([[2, 1], [2, 2], [2, 3]]))
-        assert np.array_equal(dn[dn[:, 0] != 1], np.array([[2, 1], [2, 2], [2, 3]]))
-
-        assert np.array_equal(dn.filter((1, 2, Operator.EQUAL)), np.array([[1, 2], [2, 2]]))
-        assert np.array_equal(dn[dn[:, 1] == 2], np.array([[1, 2], [2, 2]]))
-
-        assert np.array_equal(
-            dn.filter([(1, 1, Operator.EQUAL), (1, 2, Operator.EQUAL)], JoinOperator.OR),
-            np.array([[1, 1], [1, 2], [2, 1], [2, 2]]),
-        )
-        assert np.array_equal(dn[(dn[:, 1] == 1) | (dn[:, 1] == 2)], np.array([[1, 1], [1, 2], [2, 1], [2, 2]]))
-
-    @pytest.mark.parametrize("engine", __engine)
-    def test_pandas_parquet_config_kwargs(self, engine, tmpdir_factory):
-        read_kwargs = {"filters": [("integer", "<", 10)], "columns": ["integer"]}
-        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
-        dn = ParquetDataNode(
-            "foo", Scope.SCENARIO, properties={"path": temp_file_path, "engine": engine, "read_kwargs": read_kwargs}
-        )
-
-        df = pd.read_csv(os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.csv"))
-        dn.write(df)
-
-        assert set(pd.read_parquet(temp_file_path).columns) == {"id", "integer", "text"}
-        assert set(dn.read().columns) == set(read_kwargs["columns"])
-
-        # !!! filter doesn't work with `fastparquet` without partition_cols
-        if engine == "pyarrow":
-            assert len(dn.read()) != len(df)
-            assert len(dn.read()) == 2
-
-    @pytest.mark.parametrize("engine", __engine)
-    def test_kwarg_precedence(self, engine, tmpdir_factory, default_data_frame):
-        # Precedence:
-        # 1. Class read/write methods
-        # 2. Defined in read_kwargs and write_kwargs, in properties
-        # 3. Defined top-level in properties
-
-        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
-        temp_file_2_path = str(tmpdir_factory.mktemp("data").join("temp_2.parquet"))
-        df = default_data_frame.copy(deep=True)
-
-        # Write
-        # 3
-        comp3 = "snappy"
-        dn = ParquetDataNode(
-            "foo", Scope.SCENARIO, properties={"path": temp_file_path, "engine": engine, "compression": comp3}
-        )
-        dn.write(df)
-        df.to_parquet(path=temp_file_2_path, compression=comp3, engine=engine)
-        with open(temp_file_2_path, "rb") as tf:
-            with pathlib.Path(temp_file_path).open("rb") as f:
-                assert f.read() == tf.read()
-
-        # 3 and 2
-        comp2 = "gzip"
-        dn = ParquetDataNode(
-            "foo",
-            Scope.SCENARIO,
-            properties={
-                "path": temp_file_path,
-                "engine": engine,
-                "compression": comp3,
-                "write_kwargs": {"compression": comp2},
-            },
-        )
-        dn.write(df)
-        df.to_parquet(path=temp_file_2_path, compression=comp2, engine=engine)
-        with open(temp_file_2_path, "rb") as tf:
-            with pathlib.Path(temp_file_path).open("rb") as f:
-                assert f.read() == tf.read()
-
-        # 3, 2 and 1
-        comp1 = "brotli"
-        dn = ParquetDataNode(
-            "foo",
-            Scope.SCENARIO,
-            properties={
-                "path": temp_file_path,
-                "engine": engine,
-                "compression": comp3,
-                "write_kwargs": {"compression": comp2},
-            },
-        )
-        dn.write_with_kwargs(df, compression=comp1)
-        df.to_parquet(path=temp_file_2_path, compression=comp1, engine=engine)
-        with open(temp_file_2_path, "rb") as tf:
-            with pathlib.Path(temp_file_path).open("rb") as f:
-                assert f.read() == tf.read()
-
-        # Read
-        df.to_parquet(temp_file_path, engine=engine)
-        # 2
-        cols2 = ["a", "b"]
-        dn = ParquetDataNode(
-            "foo",
-            Scope.SCENARIO,
-            properties={"path": temp_file_path, "engine": engine, "read_kwargs": {"columns": cols2}},
-        )
-        assert set(dn.read().columns) == set(cols2)
-
-        # 1
-        cols1 = ["a"]
-        dn = ParquetDataNode(
-            "foo",
-            Scope.SCENARIO,
-            properties={"path": temp_file_path, "engine": engine, "read_kwargs": {"columns": cols2}},
-        )
-        assert set(dn.read_with_kwargs(columns=cols1).columns) == set(cols1)
-
-    def test_partition_cols(self, tmpdir_factory, default_data_frame: pd.DataFrame):
-        temp_dir_path = str(tmpdir_factory.mktemp("data").join("temp_dir"))
-
-        write_kwargs = {"partition_cols": ["a", "b"]}
-        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_dir_path, "write_kwargs": write_kwargs})  # type: ignore
-        dn.write(default_data_frame)
-
-        assert pathlib.Path(temp_dir_path).is_dir()
-        # dtypes change during round-trip with partition_cols
-        pd.testing.assert_frame_equal(
-            dn.read().sort_index(axis=1),
-            default_data_frame.sort_index(axis=1),
-            check_dtype=False,
-            check_categorical=False,
-        )
-
-    def test_read_with_kwargs_never_written(self):
-        path = "data/node/path"
-        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path})
-        assert dn.read_with_kwargs() is None
-
     def test_migrate_to_new_path(self, tmp_path):
         _base_path = os.path.join(tmp_path, ".data")
         path = os.path.join(_base_path, "test.parquet")

+ 188 - 0
tests/core/data/test_read_parquet_data_node.py

@@ -0,0 +1,188 @@
+# Copyright 2021-2024 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+import pathlib
+from importlib import util
+
+import numpy as np
+import pandas as pd
+import pytest
+
+from taipy.config.common.scope import Scope
+from taipy.core.data.parquet import ParquetDataNode
+from taipy.core.exceptions.exceptions import NoData
+
+
+@pytest.fixture(scope="function", autouse=True)
+def cleanup():
+    yield
+    path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/temp.parquet")
+    if os.path.isfile(path):
+        os.remove(path)
+
+
+class MyCustomObject:
+    def __init__(self, id, integer, text):
+        self.id = id
+        self.integer = integer
+        self.text = text
+
+
+class MyOtherCustomObject:
+    def __init__(self, id, sentence):
+        self.id = id
+        self.sentence = sentence
+
+
+class MyCustomXYObject:
+    def __init__(self, x, y):
+        self.x = x
+        self.y = y
+
+
+def create_custom_class(**kwargs):
+    return MyOtherCustomObject(id=kwargs["id"], sentence=kwargs["text"])
+
+
+def create_custom_xy_class(**kwargs):
+    return MyCustomXYObject(x=kwargs["x"], y=kwargs["y"])
+
+
+class TestReadParquetDataNode:
+    __engine = ["pyarrow"]
+    if util.find_spec("fastparquet"):
+        __engine.append("fastparquet")
+
+    @pytest.mark.parametrize("engine", __engine)
+    def test_raise_no_data(self, engine, parquet_file_path):
+        not_existing_parquet = ParquetDataNode(
+            "foo", Scope.SCENARIO, properties={"path": "nonexistent.parquet", "engine": engine}
+        )
+        with pytest.raises(NoData):
+            assert not_existing_parquet.read() is None
+            not_existing_parquet.read_or_raise()
+
+    @pytest.mark.parametrize("engine", __engine)
+    def test_read_parquet_file_pandas(self, engine, parquet_file_path):
+        df = pd.read_parquet(parquet_file_path)
+        parquet_data_node_as_pandas = ParquetDataNode(
+            "bar", Scope.SCENARIO, properties={"path": parquet_file_path, "engine": engine}
+        )
+        data_pandas = parquet_data_node_as_pandas.read()
+        assert isinstance(data_pandas, pd.DataFrame)
+        assert len(data_pandas) == 2
+        assert data_pandas.equals(df)
+
+    @pytest.mark.parametrize("engine", __engine)
+    def test_read_parquet_file_numpy(self, engine, parquet_file_path):
+        df = pd.read_parquet(parquet_file_path)
+        parquet_data_node_as_numpy = ParquetDataNode(
+            "bar", Scope.SCENARIO, properties={"path": parquet_file_path, "exposed_type": "numpy", "engine": engine}
+        )
+        data_numpy = parquet_data_node_as_numpy.read()
+        assert isinstance(data_numpy, np.ndarray)
+        assert len(data_numpy) == 2
+        assert np.array_equal(data_numpy, df.to_numpy())
+
+    def test_read_custom_exposed_type(self):
+        example_parquet_path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
+
+        dn = ParquetDataNode(
+            "foo", Scope.SCENARIO, properties={"path": example_parquet_path, "exposed_type": MyCustomObject}
+        )
+        assert all(isinstance(obj, MyCustomObject) for obj in dn.read())
+
+        dn = ParquetDataNode(
+            "foo", Scope.SCENARIO, properties={"path": example_parquet_path, "exposed_type": create_custom_class}
+        )
+        assert all(isinstance(obj, MyOtherCustomObject) for obj in dn.read())
+
+    @pytest.mark.parametrize("engine", __engine)
+    def test_read_parquet_folder_pandas(self, engine):
+        parquet_folder_path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/parquet_example")
+
+        df = pd.read_parquet(parquet_folder_path)
+        parquet_data_node_as_pandas = ParquetDataNode(
+            "bar", Scope.SCENARIO, properties={"path": parquet_folder_path, "engine": engine}
+        )
+        data_pandas = parquet_data_node_as_pandas.read()
+        assert isinstance(data_pandas, pd.DataFrame)
+        assert len(data_pandas) == 5
+        assert data_pandas.equals(df)
+
+    @pytest.mark.parametrize("engine", __engine)
+    def test_read_parquet_folder_numpy(self, engine):
+        parquet_folder_path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/parquet_example")
+
+        df = pd.read_parquet(parquet_folder_path)
+        parquet_data_node_as_numpy = ParquetDataNode(
+            "bar", Scope.SCENARIO, properties={"path": parquet_folder_path, "engine": engine, "exposed_type": "numpy"}
+        )
+        data_numpy = parquet_data_node_as_numpy.read()
+        assert isinstance(data_numpy, np.ndarray)
+        assert len(data_numpy) == 5
+        assert np.array_equal(data_numpy, df.to_numpy())
+
+    def test_read_folder_custom_exposed_type(self):
+        example_parquet_path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/parquet_example")
+
+        dn = ParquetDataNode(
+            "foo", Scope.SCENARIO, properties={"path": example_parquet_path, "exposed_type": MyCustomXYObject}
+        )
+        dn.read()
+        assert all(isinstance(obj, MyCustomXYObject) for obj in dn.read())
+
+        dn = ParquetDataNode(
+            "foo", Scope.SCENARIO, properties={"path": example_parquet_path, "exposed_type": create_custom_xy_class}
+        )
+        assert all(isinstance(obj, MyCustomXYObject) for obj in dn.read())
+
+    def test_read_empty_data(self, tmpdir_factory):
+        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
+        empty_df = pd.DataFrame([])
+        empty_df.to_parquet(temp_file_path)
+
+        # Pandas
+        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": "pandas"})
+        assert dn.read().equals(empty_df)
+
+        # Numpy
+        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": "numpy"})
+        assert np.array_equal(dn.read(), empty_df.to_numpy())
+
+        # Custom
+        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": MyCustomObject})
+        assert dn.read() == []
+
+    @pytest.mark.parametrize("engine", __engine)
+    def test_read_pandas_parquet_config_kwargs(self, engine, tmpdir_factory):
+        read_kwargs = {"filters": [("integer", "<", 10)], "columns": ["integer"]}
+        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
+        dn = ParquetDataNode(
+            "foo", Scope.SCENARIO, properties={"path": temp_file_path, "engine": engine, "read_kwargs": read_kwargs}
+        )
+
+        df = pd.read_csv(os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.csv"))
+        dn.write(df)
+
+        assert set(pd.read_parquet(temp_file_path).columns) == {"id", "integer", "text"}
+        assert set(dn.read().columns) == set(read_kwargs["columns"])
+
+        # !!! filter doesn't work with `fastparquet` without partition_cols
+        if engine == "pyarrow":
+            assert len(dn.read()) != len(df)
+            assert len(dn.read()) == 2
+
+    def test_read_with_kwargs_never_written(self):
+        path = "data/node/path"
+        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path})
+        assert dn.read_with_kwargs() is None

+ 176 - 0
tests/core/data/test_read_sql_table_data_node.py

@@ -0,0 +1,176 @@
+# Copyright 2021-2024 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from importlib import util
+from unittest.mock import patch
+
+import numpy as np
+import pandas as pd
+import pytest
+
+from taipy.config.common.scope import Scope
+from taipy.core.data.sql_table import SQLTableDataNode
+
+
+class MyCustomObject:
+    def __init__(self, foo=None, bar=None, *args, **kwargs):
+        self.foo = foo
+        self.bar = bar
+        self.args = args
+        self.kwargs = kwargs
+
+
+class TestReadSQLTableDataNode:
+    __pandas_properties = [
+        {
+            "db_name": "taipy",
+            "db_engine": "sqlite",
+            "table_name": "example",
+            "db_extra_args": {
+                "TrustServerCertificate": "yes",
+                "other": "value",
+            },
+        },
+    ]
+
+    if util.find_spec("pyodbc"):
+        __pandas_properties.append(
+            {
+                "db_username": "sa",
+                "db_password": "Passw0rd",
+                "db_name": "taipy",
+                "db_engine": "mssql",
+                "table_name": "example",
+                "db_extra_args": {
+                    "TrustServerCertificate": "yes",
+                },
+            },
+        )
+
+    if util.find_spec("pymysql"):
+        __pandas_properties.append(
+            {
+                "db_username": "sa",
+                "db_password": "Passw0rd",
+                "db_name": "taipy",
+                "db_engine": "mysql",
+                "table_name": "example",
+                "db_extra_args": {
+                    "TrustServerCertificate": "yes",
+                },
+            },
+        )
+
+    if util.find_spec("psycopg2"):
+        __pandas_properties.append(
+            {
+                "db_username": "sa",
+                "db_password": "Passw0rd",
+                "db_name": "taipy",
+                "db_engine": "postgresql",
+                "table_name": "example",
+                "db_extra_args": {
+                    "TrustServerCertificate": "yes",
+                },
+            },
+        )
+
+    @staticmethod
+    def mock_read_value():
+        return {"foo": ["baz", "quux", "corge"], "bar": ["quux", "quuz", None]}
+
+    @pytest.mark.parametrize("pandas_properties", __pandas_properties)
+    def test_read_pandas(self, pandas_properties):
+        custom_properties = pandas_properties.copy()
+
+        sql_data_node_as_pandas = SQLTableDataNode(
+            "foo",
+            Scope.SCENARIO,
+            properties=custom_properties,
+        )
+
+        with patch("sqlalchemy.engine.Engine.connect") as engine_mock:
+            cursor_mock = engine_mock.return_value.__enter__.return_value
+            cursor_mock.execute.return_value = self.mock_read_value()
+
+            pandas_data = sql_data_node_as_pandas.read()
+            assert isinstance(pandas_data, pd.DataFrame)
+            assert pandas_data.equals(pd.DataFrame(self.mock_read_value()))
+
+    @pytest.mark.parametrize("pandas_properties", __pandas_properties)
+    def test_read_numpy(self, pandas_properties):
+        custom_properties = pandas_properties.copy()
+        custom_properties["exposed_type"] = "numpy"
+
+        sql_data_node_as_numpy = SQLTableDataNode(
+            "foo",
+            Scope.SCENARIO,
+            properties=custom_properties,
+        )
+
+        with patch("sqlalchemy.engine.Engine.connect") as engine_mock:
+            cursor_mock = engine_mock.return_value.__enter__.return_value
+            cursor_mock.execute.return_value = self.mock_read_value()
+
+            numpy_data = sql_data_node_as_numpy.read()
+            assert isinstance(numpy_data, np.ndarray)
+            assert np.array_equal(numpy_data, pd.DataFrame(self.mock_read_value()).to_numpy())
+
+    @pytest.mark.parametrize("pandas_properties", __pandas_properties)
+    def test_read_custom_exposed_type(self, pandas_properties):
+        custom_properties = pandas_properties.copy()
+
+        custom_properties.pop("db_extra_args")
+        custom_properties["exposed_type"] = MyCustomObject
+        sql_data_node = SQLTableDataNode("foo", Scope.SCENARIO, properties=custom_properties)
+
+        mock_return_data = [
+            {"foo": "baz", "bar": "qux"},
+            {"foo": "quux", "bar": "quuz"},
+            {"foo": "corge"},
+            {"bar": "grault"},
+            {"KWARGS_KEY": "KWARGS_VALUE"},
+            {},
+        ]
+
+        with patch("sqlalchemy.engine.Engine.connect") as engine_mock:
+            cursor_mock = engine_mock.return_value.__enter__.return_value
+            cursor_mock.execute.return_value = mock_return_data
+            custom_data = sql_data_node.read()
+
+        for row_mock_data, row_custom in zip(mock_return_data, custom_data):
+            assert isinstance(row_custom, MyCustomObject)
+            assert row_custom.foo == row_mock_data.pop("foo", None)
+            assert row_custom.bar == row_mock_data.pop("bar", None)
+            assert row_custom.kwargs == row_mock_data
+
+    @pytest.mark.parametrize(
+        "tmp_sqlite_path",
+        [
+            "tmp_sqlite_db_file_path",
+            "tmp_sqlite_sqlite3_file_path",
+        ],
+    )
+    def test_sqlite_read_file_with_different_extension(self, tmp_sqlite_path, request):
+        tmp_sqlite_path = request.getfixturevalue(tmp_sqlite_path)
+        folder_path, db_name, file_extension = tmp_sqlite_path
+        properties = {
+            "db_engine": "sqlite",
+            "table_name": "example",
+            "db_name": db_name,
+            "sqlite_folder_path": folder_path,
+            "sqlite_file_extension": file_extension,
+        }
+
+        dn = SQLTableDataNode("sqlite_dn", Scope.SCENARIO, properties=properties)
+        data = dn.read()
+
+        assert data.equals(pd.DataFrame([{"foo": 1, "bar": 2}, {"foo": 3, "bar": 4}]))

+ 1 - 326
tests/core/data/test_sql_table_data_node.py

@@ -12,14 +12,10 @@
 from importlib import util
 from unittest.mock import patch
 
-import numpy as np
-import pandas as pd
 import pytest
-from pandas.testing import assert_frame_equal
 
 from taipy.config.common.scope import Scope
 from taipy.core.data.data_node_id import DataNodeId
-from taipy.core.data.operator import JoinOperator, Operator
 from taipy.core.data.sql_table import SQLTableDataNode
 from taipy.core.exceptions.exceptions import InvalidExposedType, MissingRequiredProperty
 
@@ -140,122 +136,6 @@ class TestSQLTableDataNode:
         assert sql_data_node_as_modin.properties["exposed_type"] == "pandas"
         assert sql_data_node_as_modin.read() == "pandas"
 
-    @patch("taipy.core.data.sql_table.SQLTableDataNode._read_as", return_value="custom")
-    @patch("taipy.core.data.sql_table.SQLTableDataNode._read_as_pandas_dataframe", return_value="pandas")
-    @patch("taipy.core.data.sql_table.SQLTableDataNode._read_as_numpy", return_value="numpy")
-    @pytest.mark.parametrize("pandas_properties", __pandas_properties)
-    def test_read(
-        self,
-        mock_read_as,
-        mock_read_as_pandas_dataframe,
-        mock_read_as_numpy,
-        pandas_properties,
-    ):
-        custom_properties = pandas_properties.copy()
-        # Create SQLTableDataNode without exposed_type (Default is pandas.DataFrame)
-        sql_data_node_as_pandas = SQLTableDataNode(
-            "foo",
-            Scope.SCENARIO,
-            properties=pandas_properties,
-        )
-
-        assert sql_data_node_as_pandas.read() == "pandas"
-
-        custom_properties.pop("db_extra_args")
-        custom_properties["exposed_type"] = MyCustomObject
-        # Create the same SQLTableDataNode but with custom exposed_type
-        sql_data_node_as_custom_object = SQLTableDataNode("foo", Scope.SCENARIO, properties=custom_properties)
-        assert sql_data_node_as_custom_object.read() == "custom"
-
-        # Create the same SQLDataSource but with numpy exposed_type
-        custom_properties["exposed_type"] = "numpy"
-        sql_data_source_as_numpy_object = SQLTableDataNode("foo", Scope.SCENARIO, properties=custom_properties)
-
-        assert sql_data_source_as_numpy_object.read() == "numpy"
-
-
-    @pytest.mark.parametrize("pandas_properties", __pandas_properties)
-    def test_read_as(self, pandas_properties):
-        custom_properties = pandas_properties.copy()
-
-        custom_properties.pop("db_extra_args")
-        custom_properties["exposed_type"] = MyCustomObject
-        sql_data_node = SQLTableDataNode("foo", Scope.SCENARIO, properties=custom_properties)
-
-        with patch("sqlalchemy.engine.Engine.connect") as engine_mock:
-            cursor_mock = engine_mock.return_value.__enter__.return_value
-            cursor_mock.execute.return_value = [
-                {"foo": "baz", "bar": "qux"},
-                {"foo": "quux", "bar": "quuz"},
-                {"foo": "corge"},
-                {"bar": "grault"},
-                {"KWARGS_KEY": "KWARGS_VALUE"},
-                {},
-            ]
-            data = sql_data_node._read_as()
-
-        assert isinstance(data, list)
-        assert isinstance(data[0], MyCustomObject)
-        assert isinstance(data[1], MyCustomObject)
-        assert isinstance(data[2], MyCustomObject)
-        assert isinstance(data[3], MyCustomObject)
-        assert isinstance(data[4], MyCustomObject)
-        assert isinstance(data[5], MyCustomObject)
-
-        assert data[0].foo == "baz"
-        assert data[0].bar == "qux"
-        assert data[1].foo == "quux"
-        assert data[1].bar == "quuz"
-        assert data[2].foo == "corge"
-        assert data[2].bar is None
-        assert data[3].foo is None
-        assert data[3].bar == "grault"
-        assert data[4].foo is None
-        assert data[4].bar is None
-        assert data[4].kwargs["KWARGS_KEY"] == "KWARGS_VALUE"
-        assert data[5].foo is None
-        assert data[5].bar is None
-        assert len(data[5].args) == 0
-        assert len(data[5].kwargs) == 0
-
-        with patch("sqlalchemy.engine.Engine.connect") as engine_mock:
-            cursor_mock = engine_mock.return_value.__enter__.return_value
-            cursor_mock.execute.return_value = []
-            data_2 = sql_data_node._read_as()
-        assert isinstance(data_2, list)
-        assert len(data_2) == 0
-
-    @pytest.mark.parametrize(
-        "data,written_data,called_func",
-        [
-            ([{"a": 1, "b": 2}, {"a": 3, "b": 4}], [{"a": 1, "b": 2}, {"a": 3, "b": 4}], "__insert_dicts"),
-            ({"a": 1, "b": 2}, [{"a": 1, "b": 2}], "__insert_dicts"),
-            ([(1, 2), (3, 4)], [(1, 2), (3, 4)], "__insert_tuples"),
-            ([[1, 2], [3, 4]], [[1, 2], [3, 4]], "__insert_tuples"),
-            ((1, 2), [(1, 2)], "__insert_tuples"),
-            ([1, 2, 3, 4], [(1,), (2,), (3,), (4,)], "__insert_tuples"),
-            ("foo", [("foo",)], "__insert_tuples"),
-            (None, [(None,)], "__insert_tuples"),
-            (np.array([1, 2, 3, 4]), [(1,), (2,), (3,), (4,)], "__insert_tuples"),
-            (np.array([np.array([1, 2]), np.array([3, 4])]), [[1, 2], [3, 4]], "__insert_tuples"),
-        ],
-    )
-    @pytest.mark.parametrize("pandas_properties", __pandas_properties)
-    def test_write_1(self, data, written_data, called_func, pandas_properties):
-        custom_properties = pandas_properties.copy()
-        custom_properties.pop("db_extra_args")
-        dn = SQLTableDataNode("foo", Scope.SCENARIO, properties=custom_properties)
-
-        with patch("sqlalchemy.engine.Engine.connect") as engine_mock, patch(
-            "taipy.core.data.sql_table.SQLTableDataNode._create_table"
-        ) as create_table_mock:
-            cursor_mock = engine_mock.return_value.__enter__.return_value
-            cursor_mock.execute.side_effect = None
-
-            with patch(f"taipy.core.data.sql_table.SQLTableDataNode._SQLTableDataNode{called_func}") as mck:
-                dn.write(data)
-                mck.assert_called_once_with(written_data, create_table_mock.return_value, cursor_mock, True)
-
     @pytest.mark.parametrize("pandas_properties", __pandas_properties)
     def test_raise_error_invalid_exposed_type(self, pandas_properties):
         custom_properties = pandas_properties.copy()
@@ -264,47 +144,6 @@ class TestSQLTableDataNode:
         with pytest.raises(InvalidExposedType):
             SQLTableDataNode("foo", Scope.SCENARIO, properties=custom_properties)
 
-    @pytest.mark.parametrize("pandas_properties", __pandas_properties)
-    def test_write_dataframe(self, pandas_properties):
-        # test write pandas dataframe
-        custom_properties = pandas_properties.copy()
-        custom_properties.pop("db_extra_args")
-        dn = SQLTableDataNode("foo", Scope.SCENARIO, properties=custom_properties)
-
-        df = pd.DataFrame({"a": [1, 2, 3, 4], "b": [5, 6, 7, 8]})
-        with patch("sqlalchemy.engine.Engine.connect") as engine_mock, patch(
-            "taipy.core.data.sql_table.SQLTableDataNode._create_table"
-        ):
-            cursor_mock = engine_mock.return_value.__enter__.return_value
-            cursor_mock.execute.side_effect = None
-
-            with patch("taipy.core.data.sql_table.SQLTableDataNode._SQLTableDataNode__insert_dataframe") as mck:
-                dn.write(df)
-                assert mck.call_args[0][0].equals(df)
-
-    @pytest.mark.parametrize(
-        "data",
-        [
-            [],
-            np.array([]),
-        ],
-    )
-    @pytest.mark.parametrize("pandas_properties", __pandas_properties)
-    def test_write_empty_list(self, data, pandas_properties):
-        custom_properties = pandas_properties.copy()
-        custom_properties.pop("db_extra_args")
-        dn = SQLTableDataNode("foo", Scope.SCENARIO, properties=custom_properties)
-
-        with patch("sqlalchemy.engine.Engine.connect") as engine_mock, patch(
-            "taipy.core.data.sql_table.SQLTableDataNode._create_table"
-        ) as create_table_mock:
-            cursor_mock = engine_mock.return_value.__enter__.return_value
-            cursor_mock.execute.side_effect = None
-
-            with patch("taipy.core.data.sql_table.SQLTableDataNode._SQLTableDataNode__delete_all_rows") as mck:
-                dn.write(data)
-                mck.assert_called_once_with(create_table_mock.return_value, cursor_mock, True)
-
     @pytest.mark.parametrize("pandas_properties", __pandas_properties)
     @patch("pandas.read_sql_query")
     def test_engine_cache(self, _, pandas_properties):
@@ -328,172 +167,8 @@ class TestSQLTableDataNode:
             dn.db_username = "foo"
             assert dn._engine is None
 
-            dn.write(1)
+            dn.write({})
             assert dn._engine is not None
 
             dn.some_random_attribute_that_does_not_related_to_engine = "foo"
             assert dn._engine is not None
-
-    @pytest.mark.parametrize(
-        "tmp_sqlite_path",
-        [
-            "tmp_sqlite_db_file_path",
-            "tmp_sqlite_sqlite3_file_path",
-        ],
-    )
-    def test_sqlite_read_file_with_different_extension(self, tmp_sqlite_path, request):
-        tmp_sqlite_path = request.getfixturevalue(tmp_sqlite_path)
-        folder_path, db_name, file_extension = tmp_sqlite_path
-        properties = {
-            "db_engine": "sqlite",
-            "table_name": "example",
-            "db_name": db_name,
-            "sqlite_folder_path": folder_path,
-            "sqlite_file_extension": file_extension,
-        }
-
-        dn = SQLTableDataNode("sqlite_dn", Scope.SCENARIO, properties=properties)
-        data = dn.read()
-
-        assert data.equals(pd.DataFrame([{"foo": 1, "bar": 2}, {"foo": 3, "bar": 4}]))
-
-    def test_sqlite_append_pandas(self, tmp_sqlite_sqlite3_file_path):
-        folder_path, db_name, file_extension = tmp_sqlite_sqlite3_file_path
-        properties = {
-            "db_engine": "sqlite",
-            "table_name": "example",
-            "db_name": db_name,
-            "sqlite_folder_path": folder_path,
-            "sqlite_file_extension": file_extension,
-        }
-
-        dn = SQLTableDataNode("sqlite_dn", Scope.SCENARIO, properties=properties)
-        original_data = pd.DataFrame([{"foo": 1, "bar": 2}, {"foo": 3, "bar": 4}])
-        data = dn.read()
-        assert_frame_equal(data, original_data)
-
-        append_data_1 = pd.DataFrame([{"foo": 5, "bar": 6}, {"foo": 7, "bar": 8}])
-        dn.append(append_data_1)
-        assert_frame_equal(dn.read(), pd.concat([original_data, append_data_1]).reset_index(drop=True))
-
-    def test_filter_pandas_exposed_type(self, tmp_sqlite_sqlite3_file_path):
-        folder_path, db_name, file_extension = tmp_sqlite_sqlite3_file_path
-        properties = {
-            "db_engine": "sqlite",
-            "table_name": "example",
-            "db_name": db_name,
-            "sqlite_folder_path": folder_path,
-            "sqlite_file_extension": file_extension,
-            "exposed_type": "pandas",
-        }
-        dn = SQLTableDataNode("foo", Scope.SCENARIO, properties=properties)
-        dn.write(
-            pd.DataFrame(
-                [
-                    {"foo": 1, "bar": 1},
-                    {"foo": 1, "bar": 2},
-                    {"foo": 1, "bar": 3},
-                    {"foo": 2, "bar": 1},
-                    {"foo": 2, "bar": 2},
-                    {"foo": 2, "bar": 3},
-                ]
-            )
-        )
-
-        # Test datanode indexing and slicing
-        assert dn["foo"].equals(pd.Series([1, 1, 1, 2, 2, 2]))
-        assert dn["bar"].equals(pd.Series([1, 2, 3, 1, 2, 3]))
-        assert dn[:2].equals(pd.DataFrame([{"foo": 1, "bar": 1}, {"foo": 1, "bar": 2}]))
-
-        # Test filter data
-        filtered_by_filter_method = dn.filter(("foo", 1, Operator.EQUAL))
-        filtered_by_indexing = dn[dn["foo"] == 1]
-        expected_data = pd.DataFrame([{"foo": 1, "bar": 1}, {"foo": 1, "bar": 2}, {"foo": 1, "bar": 3}])
-        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
-        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
-
-        filtered_by_filter_method = dn.filter(("foo", 1, Operator.NOT_EQUAL))
-        filtered_by_indexing = dn[dn["foo"] != 1]
-        expected_data = pd.DataFrame([{"foo": 2, "bar": 1}, {"foo": 2, "bar": 2}, {"foo": 2, "bar": 3}])
-        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
-        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
-
-        filtered_by_filter_method = dn.filter([("bar", 1, Operator.EQUAL), ("bar", 2, Operator.EQUAL)], JoinOperator.OR)
-        filtered_by_indexing = dn[(dn["bar"] == 1) | (dn["bar"] == 2)]
-        expected_data = pd.DataFrame(
-            [
-                {"foo": 1, "bar": 1},
-                {"foo": 1, "bar": 2},
-                {"foo": 2, "bar": 1},
-                {"foo": 2, "bar": 2},
-            ]
-        )
-        assert_frame_equal(filtered_by_filter_method.reset_index(drop=True), expected_data)
-        assert_frame_equal(filtered_by_indexing.reset_index(drop=True), expected_data)
-
-    def test_filter_numpy_exposed_type(self, tmp_sqlite_sqlite3_file_path):
-        folder_path, db_name, file_extension = tmp_sqlite_sqlite3_file_path
-        properties = {
-            "db_engine": "sqlite",
-            "table_name": "example",
-            "db_name": db_name,
-            "sqlite_folder_path": folder_path,
-            "sqlite_file_extension": file_extension,
-            "exposed_type": "numpy",
-        }
-        dn = SQLTableDataNode("foo", Scope.SCENARIO, properties=properties)
-        dn.write(
-            pd.DataFrame(
-                [
-                    {"foo": 1, "bar": 1},
-                    {"foo": 1, "bar": 2},
-                    {"foo": 1, "bar": 3},
-                    {"foo": 2, "bar": 1},
-                    {"foo": 2, "bar": 2},
-                    {"foo": 2, "bar": 3},
-                ]
-            )
-        )
-
-        # Test datanode indexing and slicing
-        assert np.array_equal(dn[0], np.array([1, 1]))
-        assert np.array_equal(dn[1], np.array([1, 2]))
-        assert np.array_equal(dn[:3], np.array([[1, 1], [1, 2], [1, 3]]))
-        assert np.array_equal(dn[:, 0], np.array([1, 1, 1, 2, 2, 2]))
-        assert np.array_equal(dn[1:4, :1], np.array([[1], [1], [2]]))
-
-        # Test filter data
-        assert np.array_equal(dn.filter(("foo", 1, Operator.EQUAL)), np.array([[1, 1], [1, 2], [1, 3]]))
-        assert np.array_equal(dn[dn[:, 0] == 1], np.array([[1, 1], [1, 2], [1, 3]]))
-
-        assert np.array_equal(dn.filter(("foo", 1, Operator.NOT_EQUAL)), np.array([[2, 1], [2, 2], [2, 3]]))
-        assert np.array_equal(dn[dn[:, 0] != 1], np.array([[2, 1], [2, 2], [2, 3]]))
-
-        assert np.array_equal(dn.filter(("bar", 2, Operator.EQUAL)), np.array([[1, 2], [2, 2]]))
-        assert np.array_equal(dn[dn[:, 1] == 2], np.array([[1, 2], [2, 2]]))
-
-        assert np.array_equal(
-            dn.filter([("bar", 1, Operator.EQUAL), ("bar", 2, Operator.EQUAL)], JoinOperator.OR),
-            np.array([[1, 1], [1, 2], [2, 1], [2, 2]]),
-        )
-        assert np.array_equal(dn[(dn[:, 1] == 1) | (dn[:, 1] == 2)], np.array([[1, 1], [1, 2], [2, 1], [2, 2]]))
-
-    def test_filter_does_not_read_all_entities(self, tmp_sqlite_sqlite3_file_path):
-        folder_path, db_name, file_extension = tmp_sqlite_sqlite3_file_path
-        properties = {
-            "db_engine": "sqlite",
-            "table_name": "example",
-            "db_name": db_name,
-            "sqlite_folder_path": folder_path,
-            "sqlite_file_extension": file_extension,
-            "exposed_type": "numpy",
-        }
-        dn = SQLTableDataNode("foo", Scope.SCENARIO, properties=properties)
-
-        # SQLTableDataNode.filter() should not call the MongoCollectionDataNode._read() method
-        with patch.object(SQLTableDataNode, "_read") as read_mock:
-            dn.filter(("foo", 1, Operator.EQUAL))
-            dn.filter(("bar", 2, Operator.NOT_EQUAL))
-            dn.filter([("bar", 1, Operator.EQUAL), ("bar", 2, Operator.EQUAL)], JoinOperator.OR)
-
-            assert read_mock["_read"].call_count == 0

+ 236 - 0
tests/core/data/test_write_parquet_data_node.py

@@ -0,0 +1,236 @@
+# Copyright 2021-2024 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+import pathlib
+from importlib import util
+
+import numpy as np
+import pandas as pd
+import pytest
+from pandas.testing import assert_frame_equal
+
+from taipy.config.common.scope import Scope
+from taipy.core.data.parquet import ParquetDataNode
+
+
+@pytest.fixture(scope="function", autouse=True)
+def cleanup():
+    yield
+    path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/temp.parquet")
+    if os.path.isfile(path):
+        os.remove(path)
+
+
+class MyCustomObject:
+    def __init__(self, id, integer, text):
+        self.id = id
+        self.integer = integer
+        self.text = text
+
+    def __eq__(self, value) -> bool:
+        return self.id == value.id and self.integer == value.integer and self.text == value.text
+
+
+class MyOtherCustomObject:
+    def __init__(self, id, sentence):
+        self.id = id
+        self.sentence = sentence
+
+
+def create_custom_class(**kwargs):
+    return MyOtherCustomObject(id=kwargs["id"], sentence=kwargs["text"])
+
+
+class TestWriteParquetDataNode:
+    __engine = ["pyarrow"]
+    if util.find_spec("fastparquet"):
+        __engine.append("fastparquet")
+
+    @pytest.mark.parametrize("engine", __engine)
+    def test_read_write_after_modify_path(self, engine):
+        path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
+        new_path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/temp.parquet")
+        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "engine": engine})
+        read_data = dn.read()
+        assert read_data is not None
+        dn.path = new_path
+        with pytest.raises(FileNotFoundError):
+            dn.read()
+        dn.write(read_data)
+        assert dn.read().equals(read_data)
+
+    def test_write_pandas(self, tmpdir_factory):
+        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
+        parquet_dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_file_path})
+
+        df = pd.DataFrame([{"a": 11, "b": 22, "c": 33}, {"a": 44, "b": 55, "c": 66}])
+        parquet_dn.write(df)
+
+        assert pathlib.Path(temp_file_path).exists()
+
+        dn_data = parquet_dn.read()
+
+        assert isinstance(dn_data, pd.DataFrame)
+        assert dn_data.equals(df)
+
+        parquet_dn.write(df["a"])
+        assert pd.DataFrame.equals(parquet_dn.read(), df[["a"]])
+
+        series = pd.Series([1, 2, 3])
+        parquet_dn.write(series)
+        assert np.array_equal(parquet_dn.read().to_numpy(), pd.DataFrame(series).to_numpy())
+
+        parquet_dn.write(None)
+        assert parquet_dn.read().empty
+
+    def test_write_numpy(self, tmpdir_factory):
+        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
+        parquet_dn = ParquetDataNode(
+            "foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": "numpy"}
+        )
+
+        arr = np.array([[1], [2], [3], [4], [5]])
+        parquet_dn.write(arr)
+        assert np.array_equal(parquet_dn.read(), arr)
+
+        arr = arr[0:3]
+        parquet_dn.write(arr)
+        assert np.array_equal(parquet_dn.read(), arr)
+
+        parquet_dn.write(None)
+        assert parquet_dn.read().size == 0
+
+    def test_write_custom_exposed_type(self, tmpdir_factory):
+        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
+        parquet_dn = ParquetDataNode(
+            "foo", Scope.SCENARIO, properties={"path": temp_file_path, "exposed_type": MyCustomObject}
+        )
+
+        data = [MyCustomObject(0, 1, "hi"), MyCustomObject(1, 2, "world"), MyCustomObject(2, 3, "text")]
+        parquet_dn.write(data)
+        assert all(actual == expected for actual, expected in zip(parquet_dn.read(), data))
+
+        parquet_dn.write(None)
+        assert parquet_dn.read() == []
+
+    @pytest.mark.parametrize("engine", __engine)
+    def test_write_kwarg_precedence(self, engine, tmpdir_factory, default_data_frame):
+        # Precedence:
+        # 1. Class read/write methods
+        # 2. Defined in read_kwargs and write_kwargs, in properties
+        # 3. Defined top-level in properties
+
+        temp_file_path = str(tmpdir_factory.mktemp("data").join("temp.parquet"))
+        temp_file_2_path = str(tmpdir_factory.mktemp("data").join("temp_2.parquet"))
+        df = default_data_frame.copy(deep=True)
+
+        # Write
+        # 3
+        comp3 = "snappy"
+        dn = ParquetDataNode(
+            "foo", Scope.SCENARIO, properties={"path": temp_file_path, "engine": engine, "compression": comp3}
+        )
+        dn.write(df)
+        df.to_parquet(path=temp_file_2_path, compression=comp3, engine=engine)
+        with open(temp_file_2_path, "rb") as tf:
+            with pathlib.Path(temp_file_path).open("rb") as f:
+                assert f.read() == tf.read()
+
+        # 3 and 2
+        comp2 = "gzip"
+        dn = ParquetDataNode(
+            "foo",
+            Scope.SCENARIO,
+            properties={
+                "path": temp_file_path,
+                "engine": engine,
+                "compression": comp3,
+                "write_kwargs": {"compression": comp2},
+            },
+        )
+        dn.write(df)
+        df.to_parquet(path=temp_file_2_path, compression=comp2, engine=engine)
+        with open(temp_file_2_path, "rb") as tf:
+            with pathlib.Path(temp_file_path).open("rb") as f:
+                assert f.read() == tf.read()
+
+        # 3, 2 and 1
+        comp1 = "brotli"
+        dn = ParquetDataNode(
+            "foo",
+            Scope.SCENARIO,
+            properties={
+                "path": temp_file_path,
+                "engine": engine,
+                "compression": comp3,
+                "write_kwargs": {"compression": comp2},
+            },
+        )
+        dn.write_with_kwargs(df, compression=comp1)
+        df.to_parquet(path=temp_file_2_path, compression=comp1, engine=engine)
+        with open(temp_file_2_path, "rb") as tf:
+            with pathlib.Path(temp_file_path).open("rb") as f:
+                assert f.read() == tf.read()
+
+        # Read
+        df.to_parquet(temp_file_path, engine=engine)
+        # 2
+        cols2 = ["a", "b"]
+        dn = ParquetDataNode(
+            "foo",
+            Scope.SCENARIO,
+            properties={"path": temp_file_path, "engine": engine, "read_kwargs": {"columns": cols2}},
+        )
+        assert set(dn.read().columns) == set(cols2)
+
+        # 1
+        cols1 = ["a"]
+        dn = ParquetDataNode(
+            "foo",
+            Scope.SCENARIO,
+            properties={"path": temp_file_path, "engine": engine, "read_kwargs": {"columns": cols2}},
+        )
+        assert set(dn.read_with_kwargs(columns=cols1).columns) == set(cols1)
+
+    def test_partition_cols(self, tmpdir_factory, default_data_frame: pd.DataFrame):
+        temp_dir_path = str(tmpdir_factory.mktemp("data").join("temp_dir"))
+
+        write_kwargs = {"partition_cols": ["a", "b"]}
+        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": temp_dir_path, "write_kwargs": write_kwargs})  # type: ignore
+        dn.write(default_data_frame)
+
+        assert pathlib.Path(temp_dir_path).is_dir()
+        # dtypes change during round-trip with partition_cols
+        pd.testing.assert_frame_equal(
+            dn.read().sort_index(axis=1),
+            default_data_frame.sort_index(axis=1),
+            check_dtype=False,
+            check_categorical=False,
+        )
+
+    @pytest.mark.skipif(not util.find_spec("fastparquet"), reason="Append parquet requires fastparquet to be installed")
+    @pytest.mark.parametrize(
+        "content",
+        [
+            ([{"a": 11, "b": 22, "c": 33}, {"a": 44, "b": 55, "c": 66}]),
+            (pd.DataFrame([{"a": 11, "b": 22, "c": 33}, {"a": 44, "b": 55, "c": 66}])),
+        ],
+    )
+    def test_append_pandas(self, parquet_file_path, default_data_frame, content):
+        dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": parquet_file_path})
+        assert_frame_equal(dn.read(), default_data_frame)
+
+        dn.append(content)
+        assert_frame_equal(
+            dn.read(),
+            pd.concat([default_data_frame, pd.DataFrame(content, columns=["a", "b", "c"])]).reset_index(drop=True),
+        )
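
As a companion to test_write_kwarg_precedence above, here is a compact sketch of the precedence the test asserts: method kwargs beat read_kwargs/write_kwargs in properties, which beat top-level properties (the node name, path, and column names are placeholders):

import pandas as pd

from taipy.config.common.scope import Scope
from taipy.core.data.parquet import ParquetDataNode

df = pd.DataFrame({"a": [1, 2], "b": [3, 4], "c": [5, 6]})

dn = ParquetDataNode(
    "prices",  # placeholder node name
    Scope.SCENARIO,
    properties={
        "path": "/tmp/prices.parquet",            # placeholder path
        "compression": "snappy",                  # 3. top-level property (lowest precedence)
        "write_kwargs": {"compression": "gzip"},  # 2. overrides the top-level property
        "read_kwargs": {"columns": ["a", "b"]},   # 2. applied on a plain read()
    },
)

dn.write(df)                                    # written with gzip (write_kwargs wins)
dn.write_with_kwargs(df, compression="brotli")  # 1. method kwargs win over everything
subset = dn.read_with_kwargs(columns=["a"])     # 1. also wins over read_kwargs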

+ 186 - 0
tests/core/data/test_write_sql_table_data_node.py

@@ -0,0 +1,186 @@
+# Copyright 2021-2024 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from importlib import util
+from unittest.mock import patch
+
+import numpy as np
+import pandas as pd
+import pytest
+from pandas.testing import assert_frame_equal
+
+from taipy.config.common.scope import Scope
+from taipy.core.data.sql_table import SQLTableDataNode
+
+
+class MyCustomObject:
+    def __init__(self, x=None, y=None):
+        self.x = x
+        self.y = y
+
+
+class TestWriteSQLTableDataNode:
+    __pandas_properties = [
+        {
+            "db_name": "taipy",
+            "db_engine": "sqlite",
+            "table_name": "example",
+            "db_extra_args": {
+                "TrustServerCertificate": "yes",
+                "other": "value",
+            },
+        },
+    ]
+
+    if util.find_spec("pyodbc"):
+        __pandas_properties.append(
+            {
+                "db_username": "sa",
+                "db_password": "Passw0rd",
+                "db_name": "taipy",
+                "db_engine": "mssql",
+                "table_name": "example",
+                "db_extra_args": {
+                    "TrustServerCertificate": "yes",
+                },
+            },
+        )
+
+    if util.find_spec("pymysql"):
+        __pandas_properties.append(
+            {
+                "db_username": "sa",
+                "db_password": "Passw0rd",
+                "db_name": "taipy",
+                "db_engine": "mysql",
+                "table_name": "example",
+                "db_extra_args": {
+                    "TrustServerCertificate": "yes",
+                },
+            },
+        )
+
+    if util.find_spec("psycopg2"):
+        __pandas_properties.append(
+            {
+                "db_username": "sa",
+                "db_password": "Passw0rd",
+                "db_name": "taipy",
+                "db_engine": "postgresql",
+                "table_name": "example",
+                "db_extra_args": {
+                    "TrustServerCertificate": "yes",
+                },
+            },
+        )
+
+    @pytest.mark.parametrize("pandas_properties", __pandas_properties)
+    def test_write_pandas(self, pandas_properties):
+        custom_properties = pandas_properties.copy()
+        custom_properties.pop("db_extra_args")
+        sql_table_dn = SQLTableDataNode("foo", Scope.SCENARIO, properties=custom_properties)
+
+        with patch("sqlalchemy.engine.Engine.connect") as engine_mock, patch(
+            "taipy.core.data.sql_table.SQLTableDataNode._create_table"
+        ) as _:
+            cursor_mock = engine_mock.return_value.__enter__.return_value
+            cursor_mock.execute.side_effect = None
+
+            with patch("taipy.core.data.sql_table.SQLTableDataNode._SQLTableDataNode__insert_dataframe") as mck:
+                df = pd.DataFrame([{"a": 11, "b": 22, "c": 33}, {"a": 44, "b": 55, "c": 66}])
+                sql_table_dn.write(df)
+                assert mck.call_count == 1
+
+                sql_table_dn.write(df["a"])
+                assert mck.call_count == 2
+
+                sql_table_dn.write(pd.DataFrame())
+                assert mck.call_count == 3
+
+                series = pd.Series([1, 2, 3])
+                sql_table_dn.write(series)
+                assert mck.call_count == 4
+
+                sql_table_dn.write(None)
+                assert mck.call_count == 5
+
+    @pytest.mark.parametrize("pandas_properties", __pandas_properties)
+    def test_write_numpy(self, pandas_properties):
+        custom_properties = pandas_properties.copy()
+        custom_properties["exposed_type"] = "numpy"
+        custom_properties.pop("db_extra_args")
+        sql_table_dn = SQLTableDataNode("foo", Scope.SCENARIO, properties=custom_properties)
+
+        with patch("sqlalchemy.engine.Engine.connect") as engine_mock, patch(
+            "taipy.core.data.sql_table.SQLTableDataNode._create_table"
+        ) as _:
+            cursor_mock = engine_mock.return_value.__enter__.return_value
+            cursor_mock.execute.side_effect = None
+
+            with patch("taipy.core.data.sql_table.SQLTableDataNode._SQLTableDataNode__insert_dataframe") as mck:
+                arr = np.array([[1], [2], [3], [4], [5]])
+                sql_table_dn.write(arr)
+                assert mck.call_count == 1
+
+                sql_table_dn.write(arr[0:3])
+                assert mck.call_count == 2
+
+                sql_table_dn.write(np.array([]))
+                assert mck.call_count == 3
+
+                sql_table_dn.write(None)
+                assert mck.call_count == 4
+
+    @pytest.mark.parametrize("pandas_properties", __pandas_properties)
+    def test_write_custom_exposed_type(self, pandas_properties):
+        custom_properties = pandas_properties.copy()
+        custom_properties["exposed_type"] = MyCustomObject
+        custom_properties.pop("db_extra_args")
+        sql_table_dn = SQLTableDataNode("foo", Scope.SCENARIO, properties=custom_properties)
+
+        with patch("sqlalchemy.engine.Engine.connect") as engine_mock, patch(
+            "taipy.core.data.sql_table.SQLTableDataNode._create_table"
+        ) as _:
+            cursor_mock = engine_mock.return_value.__enter__.return_value
+            cursor_mock.execute.side_effect = None
+
+            with patch("taipy.core.data.sql_table.SQLTableDataNode._SQLTableDataNode__insert_dataframe") as mck:
+                custom_data = [
+                    MyCustomObject(1, 2),
+                    MyCustomObject(3, 4),
+                    MyCustomObject(None, 2),
+                    MyCustomObject(1, None),
+                    MyCustomObject(None, None),
+                ]
+                sql_table_dn.write(custom_data)
+                assert mck.call_count == 1
+
+                sql_table_dn.write(None)
+                assert mck.call_count == 2
+
+    def test_sqlite_append_pandas(self, tmp_sqlite_sqlite3_file_path):
+        folder_path, db_name, file_extension = tmp_sqlite_sqlite3_file_path
+        properties = {
+            "db_engine": "sqlite",
+            "table_name": "example",
+            "db_name": db_name,
+            "sqlite_folder_path": folder_path,
+            "sqlite_file_extension": file_extension,
+        }
+
+        dn = SQLTableDataNode("sqlite_dn", Scope.SCENARIO, properties=properties)
+        original_data = pd.DataFrame([{"foo": 1, "bar": 2}, {"foo": 3, "bar": 4}])
+        data = dn.read()
+        assert_frame_equal(data, original_data)
+
+        append_data_1 = pd.DataFrame([{"foo": 5, "bar": 6}, {"foo": 7, "bar": 8}])
+        dn.append(append_data_1)
+        assert_frame_equal(dn.read(), pd.concat([original_data, append_data_1]).reset_index(drop=True))
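
The patch targets above ("..._SQLTableDataNode__insert_dataframe") rely on Python's name mangling of double-underscore attributes; a small, self-contained sketch of that pattern, using a made-up Widget class purely for illustration:

from unittest.mock import patch


class Widget:
    def __save(self, payload):  # stored on the class as _Widget__save
        raise RuntimeError("real I/O")

    def write(self, payload):
        self.__save(payload)


# The mangled name must be used when patching, exactly as in the tests above.
with patch.object(Widget, "_Widget__save") as mock_save:
    Widget().write({"a": 1})
    mock_save.assert_called_once_with({"a": 1})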

+ 29 - 32
tests/core/job/test_job_manager.py

@@ -14,13 +14,14 @@ import random
 import string
 from functools import partial
 from time import sleep
+from typing import cast
 from unittest import mock
 
 import pytest
 
 from taipy.config.common.scope import Scope
 from taipy.config.config import Config
-from taipy.core._orchestrator._dispatcher._job_dispatcher import _JobDispatcher
+from taipy.core._orchestrator._dispatcher import _StandaloneJobDispatcher
 from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
 from taipy.core.config.job_config import JobConfig
 from taipy.core.data._data_manager import _DataManager
@@ -160,11 +161,10 @@ def test_raise_when_trying_to_delete_unfinished_job():
     task = Task(
         "task_config_1", {}, partial(lock_multiply, lock), [dn_1, dn_2], [dn_3], id="raise_when_delete_unfinished"
     )
-    _OrchestratorFactory._build_dispatcher()
+    dispatcher = cast(_StandaloneJobDispatcher, _OrchestratorFactory._build_dispatcher())
     with lock:
         job = _OrchestratorFactory._orchestrator.submit_task(task)._jobs[0]
-
-        assert_true_after_time(lambda: len(_JobDispatcher._dispatched_processes) == 1)
+        assert_true_after_time(lambda: dispatcher._nb_available_workers == 1)
         assert_true_after_time(job.is_running)
         with pytest.raises(JobNotDeletedException):
             _JobManager._delete(job)
@@ -203,19 +203,17 @@ def test_cancel_single_job():
 
     task = _create_task(multiply, name="cancel_single_job")
 
-    _OrchestratorFactory._build_dispatcher()
-
-    assert_true_after_time(_OrchestratorFactory._dispatcher.is_running)
-    _OrchestratorFactory._dispatcher.stop()
-    assert_true_after_time(lambda: not _OrchestratorFactory._dispatcher.is_running())
+    dispatcher = cast(_StandaloneJobDispatcher, _OrchestratorFactory._build_dispatcher())
 
+    assert_true_after_time(dispatcher.is_running)
+    dispatcher.stop()
+    assert_true_after_time(lambda: not dispatcher.is_running())
     job = _OrchestratorFactory._orchestrator.submit_task(task).jobs[0]
 
     assert_true_after_time(job.is_pending)
-    assert_true_after_time(lambda: len(_JobDispatcher._dispatched_processes) == 0)
+    assert_true_after_time(lambda: dispatcher._nb_available_workers == 1)
     _JobManager._cancel(job.id)
     assert_true_after_time(job.is_canceled)
-    assert_true_after_time(job.is_canceled)
 
 
 @mock.patch(
@@ -228,11 +226,11 @@ def test_cancel_canceled_abandoned_failed_jobs(cancel_jobs, orchestrated_job):
 
     task = _create_task(multiply, name="test_cancel_canceled_abandoned_failed_jobs")
 
-    _OrchestratorFactory._build_dispatcher()
+    dispatcher = _OrchestratorFactory._build_dispatcher()
 
-    assert_true_after_time(_OrchestratorFactory._dispatcher.is_running)
-    _OrchestratorFactory._dispatcher.stop()
-    assert_true_after_time(lambda: not _OrchestratorFactory._dispatcher.is_running())
+    assert_true_after_time(dispatcher.is_running)
+    dispatcher.stop()
+    assert_true_after_time(lambda: not dispatcher.is_running())
 
     job = _OrchestratorFactory._orchestrator.submit_task(task).jobs[0]
     job.canceled()
@@ -265,11 +263,11 @@ def test_cancel_completed_skipped_jobs(cancel_jobs, orchestrated_job):
     Config.configure_job_executions(mode=JobConfig._STANDALONE_MODE, max_nb_of_workers=1)
     task = _create_task(multiply, name="cancel_single_job")
 
-    _OrchestratorFactory._build_dispatcher()
+    dispatcher = _OrchestratorFactory._build_dispatcher()
 
-    assert_true_after_time(_OrchestratorFactory._dispatcher.is_running)
-    _OrchestratorFactory._dispatcher.stop()
-    assert_true_after_time(lambda: not _OrchestratorFactory._dispatcher.is_running())
+    assert_true_after_time(dispatcher.is_running)
+    dispatcher.stop()
+    assert_true_after_time(lambda: not dispatcher.is_running())
 
     job = _OrchestratorFactory._orchestrator.submit_task(task).jobs[0]
     job.completed()
@@ -307,22 +305,19 @@ def test_cancel_single_running_job():
     dnm._set(dn_3)
     task = Task("task_config_1", {}, partial(lock_multiply, lock), [dn_1, dn_2], [dn_3], id="cancel_single_job")
 
-    _OrchestratorFactory._build_dispatcher()
+    dispatcher = cast(_StandaloneJobDispatcher, _OrchestratorFactory._build_dispatcher(force_restart=True))
 
-    assert_true_after_time(_OrchestratorFactory._dispatcher.is_running)
-    assert_true_after_time(lambda: _OrchestratorFactory._dispatcher._nb_available_workers == 2)
+    assert_true_after_time(dispatcher.is_running)
+    assert_true_after_time(lambda: dispatcher._nb_available_workers == 2)
 
     with lock:
         job = _OrchestratorFactory._orchestrator.submit_task(task)._jobs[0]
-
-        assert_true_after_time(lambda: len(_JobDispatcher._dispatched_processes) == 1)
-        assert_true_after_time(lambda: _OrchestratorFactory._dispatcher._nb_available_workers == 1)
         assert_true_after_time(job.is_running)
+        assert dispatcher._nb_available_workers == 1
         _JobManager._cancel(job)
         assert_true_after_time(job.is_running)
-    assert_true_after_time(lambda: len(_JobDispatcher._dispatched_processes) == 0)
-    assert_true_after_time(lambda: _OrchestratorFactory._dispatcher._nb_available_workers == 2)
     assert_true_after_time(job.is_completed)
+    assert dispatcher._nb_available_workers == 2
 
 
 def test_cancel_subsequent_jobs():
@@ -411,10 +406,12 @@ def test_cancel_subsequent_jobs():
     assert_true_after_time(job_4.is_canceled)
     assert_true_after_time(job_5.is_abandoned)
     assert_true_after_time(job_6.is_abandoned)
-    assert_true_after_time(lambda: all(
-        not _OrchestratorFactory._orchestrator._is_blocked(job)
-        for job in [job_1, job_2, job_3, job_4, job_5, job_6]
-    ))
+    assert_true_after_time(
+        lambda: all(
+            not _OrchestratorFactory._orchestrator._is_blocked(job)
+            for job in [job_1, job_2, job_3, job_4, job_5, job_6]
+        )
+    )
     assert_true_after_time(lambda: _OrchestratorFactory._orchestrator.jobs_to_run.qsize() == 0)
 
 
@@ -474,7 +471,7 @@ def _create_task(function, nb_outputs=1, name=None):
     output_dn_configs = [
         Config.configure_data_node(f"output{i}", "pickle", Scope.SCENARIO, default_data=0) for i in range(nb_outputs)
     ]
-    _DataManager._bulk_get_or_create({cfg for cfg in output_dn_configs})
+    _DataManager._bulk_get_or_create(output_dn_configs)
     name = name or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
     task_config = Config.configure_task(
         id=name,
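
The cast(...) calls introduced in this file only inform static type checkers that _build_dispatcher() returned the standalone dispatcher; cast() does nothing at runtime. A tiny illustration with made-up class names:

from typing import cast


class Dispatcher:
    pass


class StandaloneDispatcher(Dispatcher):
    _nb_available_workers = 2


def build_dispatcher() -> Dispatcher:  # declared to return the base type
    return StandaloneDispatcher()


dispatcher = cast(StandaloneDispatcher, build_dispatcher())
# cast() is a no-op at runtime; it only lets type checkers accept the subclass-only attribute.
assert dispatcher._nb_available_workers == 2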

+ 8 - 9
tests/core/job/test_job_manager_with_sql_repo.py

@@ -14,13 +14,14 @@ import random
 import string
 from functools import partial
 from time import sleep
+from typing import cast
 
 import pytest
 
 from taipy.config.common.scope import Scope
 from taipy.config.config import Config
 from taipy.core import Task
-from taipy.core._orchestrator._dispatcher._job_dispatcher import _JobDispatcher
+from taipy.core._orchestrator._dispatcher import _StandaloneJobDispatcher
 from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
 from taipy.core.config.job_config import JobConfig
 from taipy.core.data import InMemoryDataNode
@@ -134,10 +135,8 @@ def test_delete_job(init_sql_repo):
 
 
 def test_raise_when_trying_to_delete_unfinished_job(init_sql_repo):
-    Config.configure_job_executions(mode=JobConfig._STANDALONE_MODE, max_nb_of_workers=2)
+    Config.configure_job_executions(mode=JobConfig._STANDALONE_MODE, max_nb_of_workers=3)
 
-    m = multiprocessing.Manager()
-    lock = m.Lock()
     dnm = _DataManagerFactory._build_manager()
     dn_1 = InMemoryDataNode("dn_config_1", Scope.SCENARIO, properties={"default_data": 1})
     dnm._set(dn_1)
@@ -145,14 +144,15 @@ def test_raise_when_trying_to_delete_unfinished_job(init_sql_repo):
     dnm._set(dn_2)
     dn_3 = InMemoryDataNode("dn_config_3", Scope.SCENARIO)
     dnm._set(dn_3)
+    proc_manager = multiprocessing.Manager()
+    lock = proc_manager.Lock()
     task = Task("task_cfg", {}, partial(lock_multiply, lock), [dn_1, dn_2], [dn_3], id="raise_when_delete_unfinished")
-    _OrchestratorFactory._build_dispatcher()
+    dispatcher = cast(_StandaloneJobDispatcher, _OrchestratorFactory._build_dispatcher(force_restart=True))
 
     with lock:
         job = _OrchestratorFactory._orchestrator.submit_task(task)._jobs[0]
-
-        assert_true_after_time(lambda: len(_JobDispatcher._dispatched_processes) == 1)
         assert_true_after_time(job.is_running)
+        assert dispatcher._nb_available_workers == 2
         with pytest.raises(JobNotDeletedException):
             _JobManager._delete(job)
         with pytest.raises(JobNotDeletedException):
@@ -178,7 +178,6 @@ def test_force_deleting_unfinished_job(init_sql_repo):
     )
     reference_last_edit_date = dn_3.last_edit_date
     _OrchestratorFactory._build_dispatcher()
-
     with lock:
         job = _OrchestratorFactory._orchestrator.submit_task(task_1)._jobs[0]
         assert_true_after_time(job.is_running)
@@ -245,7 +244,7 @@ def _create_task(function, nb_outputs=1, name=None):
     output_dn_configs = [
         Config.configure_data_node(f"output{i}", scope=Scope.SCENARIO, default_data=0) for i in range(nb_outputs)
     ]
-    _DataManager._bulk_get_or_create({cfg for cfg in output_dn_configs})
+    _DataManager._bulk_get_or_create(output_dn_configs)
     name = name or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
     task_config = Config.configure_task(
         id=name,

+ 1 - 1
tests/core/scenario/test_scenario_manager.py

@@ -559,7 +559,7 @@ def test_scenario_manager_only_creates_data_node_once():
     scenario_1_sorted_tasks = scenario_1._get_sorted_tasks()
     expected = [{task_mult_by_2_config.id, task_mult_by_4_config.id}, {task_mult_by_3_config.id}]
     for i, list_tasks_by_level in enumerate(scenario_1_sorted_tasks):
-        assert set(t.config_id for t in list_tasks_by_level) == expected[i]
+        assert {t.config_id for t in list_tasks_by_level} == expected[i]
     assert scenario_1.cycle.frequency == Frequency.DAILY
 
     _ScenarioManager._create(scenario_config)

+ 1 - 1
tests/core/scenario/test_scenario_manager_with_sql_repo.py

@@ -371,7 +371,7 @@ def test_scenario_manager_only_creates_data_node_once(init_sql_repo):
     scenario_1_sorted_tasks = scenario_1._get_sorted_tasks()
     expected = [{task_mult_by_2_config.id, task_mult_by_4_config.id}, {task_mult_by_3_config.id}]
     for i, list_tasks_by_level in enumerate(scenario_1_sorted_tasks):
-        assert set(t.config_id for t in list_tasks_by_level) == expected[i]
+        assert {t.config_id for t in list_tasks_by_level} == expected[i]
     assert scenario_1.cycle.frequency == Frequency.DAILY
 
     _ScenarioManager._create(scenario_config)

+ 3 - 3
tools/gui/generate_pyi.py

@@ -38,9 +38,9 @@ replaced_content = ""
 with open(gui_pyi_file, "r") as file:
     for line in file:
         if "def run(" in line:
-            line = line.replace(
-                ", run_server: bool = ..., run_in_thread: bool = ..., async_mode: str = ..., **kwargs", gui_config
-            )
+            replace_str = line[line.index(", run_server") : (line.index("**kwargs") + len("**kwargs"))]
+            # ", run_server: bool = ..., run_in_thread: bool = ..., async_mode: str = ..., **kwargs"
+            line = line.replace(replace_str, gui_config)
         replaced_content = replaced_content + line
 
 with open(gui_pyi_file, "w") as write_file:
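
The new slicing logic above computes the span to replace instead of hard-coding it; a quick sketch with a hypothetical signature line shows what replace_str captures:

line = "    def run(self, run_server: bool = ..., run_in_thread: bool = ..., async_mode: str = ..., **kwargs) -> None: ..."
replace_str = line[line.index(", run_server") : (line.index("**kwargs") + len("**kwargs"))]
print(replace_str)
# -> ", run_server: bool = ..., run_in_thread: bool = ..., async_mode: str = ..., **kwargs"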

+ 2 - 0
tools/packages/taipy-config/MANIFEST.in

@@ -1,2 +1,4 @@
 include taipy/config/*.pyi
 include taipy/config/*.json
+include *.json
+include taipy/config/setup.requirements.txt

+ 9 - 4
tools/packages/taipy-config/setup.py

@@ -13,7 +13,6 @@
 
 """The setup script."""
 import json
-
 from pathlib import Path
 
 from setuptools import find_packages, setup
@@ -22,13 +21,17 @@ root_folder = Path(__file__).parent
 
 readme = Path(root_folder / "README.md").read_text("UTF-8")
 
-with open(root_folder / "taipy" / "config" / "version.json") as version_file:
+version_path = "taipy/config/version.json"
+
+setup_requirements = Path("taipy/config/setup.requirements.txt")
+
+with open(version_path) as version_file:
     version = json.load(version_file)
     version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}'
     if vext := version.get("ext"):
         version_string = f"{version_string}.{vext}"
 
-requirements = [r for r in (root_folder / "setup.requirements.txt").read_text("UTF-8").splitlines() if r]
+requirements = [r for r in (setup_requirements).read_text("UTF-8").splitlines() if r]
 
 test_requirements = ["pytest>=3.8"]
 
@@ -56,7 +59,9 @@ setup(
     keywords="taipy-config",
     name="taipy-config",
     packages=find_packages(
-        where=root_folder, include=["taipy", "taipy.config", "taipy.config.*", "taipy.logger", "taipy.logger.*"]
+        where=root_folder, include=[
+            "taipy", "taipy.config", "taipy.config.*", "taipy.logger", "taipy.logger.*", "taipy._cli", "taipy._cli.*"
+        ]
     ),
     test_suite="tests",
     tests_require=test_requirements,

Not all files are shown because too many files changed in this diff