
Merge branch 'develop' into feature/server-generalize

Dinh Long Nguyen 1 month ago
parent
commit
dfaf7fdeef
93 changed files with 2193 additions and 1216 deletions
  1. .github/workflows/build-and-release-single-package.yml (+132 -98)
  2. .github/workflows/build-and-release.yml (+165 -131)
  3. Pipfile (+1 -0)
  4. doc/gui/examples/charts/advanced_animation.py (+54 -0)
  5. frontend/taipy-gui/base/src/app.ts (+4 -4)
  6. frontend/taipy-gui/base/src/packaging/taipy-gui-base.d.ts (+2 -2)
  7. frontend/taipy-gui/base/src/socket.ts (+2 -2)
  8. frontend/taipy-gui/base/src/wsAdapter.ts (+5 -5)
  9. frontend/taipy-gui/src/components/Taipy/Chart.tsx (+148 -8)
  10. frontend/taipy-gui/src/components/Taipy/Expandable.spec.tsx (+1 -1)
  11. frontend/taipy-gui/src/components/Taipy/Input.tsx (+1 -0)
  12. frontend/taipy-gui/src/components/Taipy/Metric.tsx (+4 -1)
  13. frontend/taipy-gui/src/components/Taipy/tableUtils.tsx (+1 -1)
  14. frontend/taipy-gui/src/context/taipyReducers.spec.ts (+3 -2)
  15. frontend/taipy-gui/src/context/taipyReducers.ts (+15 -12)
  16. frontend/taipy-gui/src/context/wsUtils.ts (+2 -2)
  17. frontend/taipy-gui/webpack.config.js (+96 -96)
  18. readme_img/gui_creation.webp (BIN)
  19. readme_img/readme_app.gif (BIN)
  20. readme_img/readme_cloud_demo.gif (BIN)
  21. readme_img/readme_demo_studio.gif (BIN)
  22. readme_img/readme_exec_graph.png (BIN)
  23. readme_img/readme_logo.png (BIN)
  24. readme_img/scenario_and_data_mgt.gif (BIN)
  25. readme_img/taipy-github-optimized.png (BIN)
  26. readme_img/taipy_banner.png (BIN)
  27. readme_img/taipy_github_GUI_video.gif (BIN)
  28. readme_img/taipy_github_data_support.png (BIN)
  29. readme_img/taipy_github_scenario.png (BIN)
  30. readme_img/taipy_github_scenarios_video.gif (BIN)
  31. readme_img/tiny_demo_readme.gif (BIN)
  32. taipy/common/pyproject.toml (+9 -20)
  33. taipy/common/version.json (+1 -1)
  34. taipy/core/_orchestrator/_dispatcher/_task_function_wrapper.py (+1 -1)
  35. taipy/core/data/_data_manager.py (+42 -2)
  36. taipy/core/data/data_node.py (+12 -14)
  37. taipy/core/pyproject.toml (+11 -11)
  38. taipy/core/version.json (+1 -1)
  39. taipy/gui/_default_config.py (+0 -1)
  40. taipy/gui/_renderers/factory.py (+1 -0)
  41. taipy/gui/_renderers/json.py (+2 -2)
  42. taipy/gui/config.py (+0 -2)
  43. taipy/gui/data/content_accessor.py (+1 -1)
  44. taipy/gui/gui.py (+10 -11)
  45. taipy/gui/types.py (+1 -1)
  46. taipy/gui/utils/chart_config_builder.py (+4 -3)
  47. taipy/gui/version.json (+1 -1)
  48. taipy/gui/viselements.json (+5 -0)
  49. taipy/rest/pyproject.toml (+9 -7)
  50. taipy/rest/version.json (+1 -1)
  51. taipy/templates/pyproject.toml (+6 -7)
  52. taipy/templates/version.json (+1 -1)
  53. taipy/version.json (+1 -1)
  54. tests/core/data/test_data_manager.py (+64 -1)
  55. tests/core/data/test_data_node.py (+3 -1)
  56. tests/core/data/test_in_memory_data_node.py (+3 -1)
  57. tests/core/data/test_json_data_node.py (+3 -1)
  58. tests/core/data/test_pickle_data_node.py (+3 -1)
  59. tests/core/data/test_read_csv_data_node.py (+6 -3)
  60. tests/core/data/test_read_excel_data_node.py (+12 -11)
  61. tests/core/data/test_read_parquet_data_node.py (+3 -1)
  62. tests/gui/control/test_file_download.py (+20 -0)
  63. tests/gui/gui_specific/test_json_adapter.py (+90 -0)
  64. tests/tools/release/__init__.py (+0 -16)
  65. tests/tools/release/test_version.py (+156 -0)
  66. tools/packages/pipfiles/Pipfile3.10.max (+1 -0)
  67. tools/packages/pipfiles/Pipfile3.11.max (+1 -0)
  68. tools/packages/pipfiles/Pipfile3.12.max (+1 -0)
  69. tools/packages/pipfiles/Pipfile3.9.max (+1 -0)
  70. tools/packages/taipy-common/MANIFEST.in (+3 -2)
  71. tools/packages/taipy-common/setup.py (+4 -30)
  72. tools/packages/taipy-core/MANIFEST.in (+3 -2)
  73. tools/packages/taipy-core/setup.py (+4 -26)
  74. tools/packages/taipy-gui/MANIFEST.in (+3 -2)
  75. tools/packages/taipy-gui/setup.py (+4 -54)
  76. tools/packages/taipy-rest/MANIFEST.in (+3 -2)
  77. tools/packages/taipy-rest/setup.py (+4 -15)
  78. tools/packages/taipy-templates/MANIFEST.in (+3 -2)
  79. tools/packages/taipy-templates/setup.py (+6 -18)
  80. tools/packages/taipy/MANIFEST.in (+0 -22)
  81. tools/packages/taipy/setup.py (+3 -36)
  82. tools/release/build_package_structure.py (+148 -12)
  83. tools/release/bump_patch_version.py (+47 -0)
  84. tools/release/bump_version.py (+0 -97)
  85. tools/release/common.py (+421 -0)
  86. tools/release/delete_dev_releases.py (+98 -0)
  87. tools/release/delete_dev_releases.sh (+0 -36)
  88. tools/release/fetch_latest_versions.py (+0 -80)
  89. tools/release/setup_project.py (+0 -97)
  90. tools/release/setup_version.py (+0 -125)
  91. tools/release/setup_versions.py (+192 -0)
  92. tools/release/update_setup.py (+0 -43)
  93. tools/release/update_setup_requirements.py (+124 -25)

+ 132 - 98
.github/workflows/build-and-release-single-package.yml

@@ -1,119 +1,136 @@
-name: Build and release one taipy sub-package
+name: Build one taipy sub-package release
 
 on:
   workflow_dispatch:
     inputs:
-      internal_dep_on_pypi:
-        description: "Point taipy internal dependencies to Pypi? If false it will point to the github .tar.gz release file"
-        default: "false"
-        required: true
-      release_type:
-        description: "The type of release to be made (dev or production)"
-        default: "dev"
+      target_package:
+        description: "Package name"
         required: true
+        type: choice
+        options:
+          - gui
+          - common
+          - core
+          - rest
+          - templates
+          - taipy
       target_version:
-        description: "The version of the package to be released"
+        description: "Package version"
         required: true
-      target_package:
-        description: "The package to be released (gui, common, core, rest, templates, taipy)"
+      release_type:
+        description: "Release type"
         required: true
+        type: choice
+        options:
+          - dev
+          - production
+        default: "dev"
+      sub_packages_location:
+        description: "Dependencies location"
+        required: true
+        type: choice
+        options:
+          - GitHub
+          - Pypi
+        default: "GitHub"
 
 env:
   NODE_OPTIONS: --max-old-space-size=4096
 
 permissions:
   contents: write
+  pull-requests: write
 
 jobs:
-  fetch-versions:
+  setup-versions:
     runs-on: ubuntu-latest
     outputs:
+        branch: ${{ steps.version-setup.outputs.branch }}
         common_VERSION: ${{ steps.version-setup.outputs.common_VERSION }}
         core_VERSION: ${{ steps.version-setup.outputs.core_VERSION }}
         gui_VERSION: ${{ steps.version-setup.outputs.gui_VERSION }}
         rest_VERSION: ${{ steps.version-setup.outputs.rest_VERSION }}
         templates_VERSION: ${{ steps.version-setup.outputs.templates_VERSION }}
         taipy_VERSION: ${{ steps.version-setup.outputs.taipy_VERSION }}
+        LATEST_TAIPY_VERSION: ${{ steps.version-setup.outputs.LATEST_TAIPY_VERSION }}
     steps:
       - uses: actions/checkout@v4
-      - name: Extract branch name
-        shell: bash
-        run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
-        id: extract_branch
 
-      - name: Setup Version
+      - name: Validate target version
+        run: |
+          version="${{ github.event.inputs.target_version }}"
+          if [[ ! "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+            echo "❌ Invalid version format: '$version' - <M>.<m>.<patch> is mandatory."
+            exit 1
+          fi
+          echo "✅ Valid target version: $version"
+
+      - name: Install mandatory Python packages
+        run: |
+          python -m pip install --upgrade pip
+          pip install requests
+
+      - name: Setup versions
         id: version-setup
         run: |
-          python tools/release/fetch_latest_versions.py \
-          ${{ github.event.inputs.release_type }} \
-          ${{ github.event.inputs.internal_dep_on_pypi }} \
-          ${{ github.event.inputs.target_version }} \
-          ${{ github.event.inputs.target_package }} >> $GITHUB_OUTPUT
+          python tools/release/setup_versions.py \
+          ${{ github.event.inputs.target_package }} \
+          -v ${{ github.event.inputs.target_version }} \
+          -t ${{ github.event.inputs.release_type }} \
+          -r ${{ github.repository }} >>$GITHUB_OUTPUT
+          echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >>$GITHUB_OUTPUT
 
-  build-and-release-package:
-    needs: [fetch-versions]
+  build-package-release:
+    needs: setup-versions
     timeout-minutes: 20
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-        with:
-          ssh-key: ${{secrets.DEPLOY_KEY}}
       - uses: actions/setup-python@v5
         with:
           python-version: 3.9
       - uses: actions/setup-node@v4
         with:
-          node-version: '20'
+          node-version: "20"
 
       - name: Extract commit hash
+        id: extract_hash
         shell: bash
         run: echo "HASH=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
-        id: extract_hash
 
-      - name: Set Build Variables
-        id: set-variables
+      - name: Install mandatory Python packages
+        run: |
+          python -m pip install --upgrade pip
+          pip install requests
+
+      - name: Update setup.requirements.txt
+        run: |
+          python tools/release/update_setup_requirements.py ${{ github.event.inputs.target_package }} \
+            ${{ needs.setup-versions.outputs.common_VERSION }} \
+            ${{ needs.setup-versions.outputs.core_VERSION }} \
+            ${{ needs.setup-versions.outputs.gui_VERSION }} \
+            ${{ needs.setup-versions.outputs.rest_VERSION }} \
+            ${{ needs.setup-versions.outputs.templates_VERSION }} \
+            -deps ${{ github.event.inputs.sub_packages_location }} \
+            -r ${{ github.repository }}
+
+      - name: Set package version for ${{ github.event.inputs.target_package }} ${{ github.event.inputs.target_version }}
+        id: package-version
+        shell: bash
         run: |
           if [ "${{ github.event.inputs.target_package }}" == "common" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.common_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/common" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.common_VERSION}}-common" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-common-${{needs.fetch-versions.outputs.common_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
+            echo "version=${{ needs.setup-versions.outputs.common_VERSION }}" >> $GITHUB_OUTPUT
           elif [ "${{ github.event.inputs.target_package }}" == "core" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.core_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/core" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.core_VERSION}}-core" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-core-${{needs.fetch-versions.outputs.core_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
+            echo "version=${{ needs.setup-versions.outputs.core_VERSION }}" >> $GITHUB_OUTPUT
           elif [ "${{ github.event.inputs.target_package }}" == "gui" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.gui_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/gui" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.gui_VERSION}}-gui" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-gui-${{needs.fetch-versions.outputs.gui_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
+            echo "version=${{ needs.setup-versions.outputs.gui_VERSION }}" >> $GITHUB_OUTPUT
           elif [ "${{ github.event.inputs.target_package }}" == "rest" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.rest_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/rest" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.rest_VERSION}}-rest" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-rest-${{needs.fetch-versions.outputs.rest_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
+            echo "version=${{ needs.setup-versions.outputs.rest_VERSION }}" >> $GITHUB_OUTPUT
           elif [ "${{ github.event.inputs.target_package }}" == "templates" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.templates_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/templates" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.templates_VERSION}}-templates" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-templates-${{needs.fetch-versions.outputs.templates_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
+            echo "version=${{ needs.setup-versions.outputs.templates_VERSION }}" >> $GITHUB_OUTPUT
+          elif [ "${{ github.event.inputs.target_package }}" == "taipy" ]; then
+            echo "version=${{ needs.setup-versions.outputs.taipy_VERSION }}" >> $GITHUB_OUTPUT
           fi
-        shell: bash
-
-      - name: Update setup.requirements.txt
-        run: |
-          python tools/release/update_setup_requirements.py taipy-${{ github.event.inputs.target_package }} \
-            ${{needs.fetch-versions.outputs.common_VERSION}} \
-            ${{needs.fetch-versions.outputs.core_VERSION}} \
-            ${{needs.fetch-versions.outputs.gui_VERSION}} \
-            ${{needs.fetch-versions.outputs.rest_VERSION}} \
-            ${{needs.fetch-versions.outputs.templates_VERSION}} \
-            ${{ github.event.inputs.internal_dep_on_pypi }}
-
-      - name: Copy tools
-        run: |
-          cp -r tools ${{ steps.set-variables.outputs.package_dir }}
 
       - name: Install dependencies
         run: |
@@ -121,63 +138,80 @@ jobs:
           pip install build wheel pipenv mypy black isort
 
       - name: Install GUI dependencies
-        if: github.event.inputs.target_package == 'gui'
+        if: ${{ github.event.inputs.target_package == 'gui' || github.event.inputs.target_package == 'taipy' }}
         run: |
           pipenv install --dev
 
       - name: Generate GUI pyi file
-        if: github.event.inputs.target_package == 'gui'
+        if: ${{ github.event.inputs.target_package == 'gui' }}
         run: |
           pipenv run python tools/gui/generate_pyi.py
 
-      - name: Build frontends
-        if: github.event.inputs.target_package == 'gui'
+      - name: Build Taipy GUI front-end
+        if: ${{ github.event.inputs.target_package == 'gui' || github.event.inputs.target_package == 'taipy' }}
         run: |
-          python tools/frontend/bundle_build.py
+          python tools/frontend/bundle_build.py gui
 
-      - name: Copy files from tools
+      - name: Build Taipy front-end
+        if: ${{ github.event.inputs.target_package == 'taipy' }}
         run: |
-          cp -r tools/packages/taipy-${{ github.event.inputs.target_package }}/. ${{ steps.set-variables.outputs.package_dir }}
+          python tools/frontend/bundle_build.py taipy
 
       - name: Build Package Structure
-        working-directory: ${{ steps.set-variables.outputs.package_dir }}
         run: |
-          python tools/release/build_package_structure.py  ${{ github.event.inputs.target_package }}
-
-      - name: Copy Taipy Logger
-        if: github.event.inputs.target_package == 'common'
-        run: |
-          cp -r taipy/logger/. ${{ steps.set-variables.outputs.package_dir }}/taipy/logger
-
-      - name: Copy _cli folder
-        run: |
-          cp -r taipy/_cli/. ${{ steps.set-variables.outputs.package_dir }}/taipy/_cli
+          python tools/release/build_package_structure.py ${{ github.event.inputs.target_package }} ${{ steps.package-version.outputs.version }}
 
       - name: Build package
-        working-directory: ${{ steps.set-variables.outputs.package_dir }}
+        working-directory: "build_${{ github.event.inputs.target_package }}"
         run: |
           python -m build
-
-      - name: Rename files
-        working-directory: ${{ steps.set-variables.outputs.package_dir }}
-        run: |
-          for file in ./dist/*; do mv "$file" "${file//_/-}"; done
-
+          if compgen -G "./dist/*_*" > /dev/null; then
+            for file in ./dist/*_*; do mv "$file" "${file//_/-}"; done
+          fi
+          
       - name: Create tag and release
-        working-directory: ${{ steps.set-variables.outputs.package_dir }}
+        working-directory: "build_${{ github.event.inputs.target_package }}"
         run: |
-           if [ "${{ github.event.inputs.release_type }}" == "dev" ]; then
-            gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --prerelease --title ${{ steps.set-variables.outputs.release_name }} --notes "Release Draft ${{ steps.set-variables.outputs.release_name }}"
-           else
-            gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --title ${{ steps.set-variables.outputs.release_name }} --notes "Release ${{ steps.set-variables.outputs.release_name }}"
-           fi
+          package_suffix=""
+          if [ "${{ github.event.inputs.target_package }}" != "taipy" ]; then
+            package_suffix="-${{ github.event.inputs.target_package }}"
+          fi
+          release_name="${{ steps.package-version.outputs.version }}$package_suffix"
+          tar_path="./dist/taipy$package_suffix-${{ steps.package-version.outputs.version }}.tar.gz"
+          if [ "${{ github.event.inputs.release_type }}" == "dev" ]; then
+            gh release create $release_name $tar_path --target ${{ steps.extract_hash.outputs.HASH }} --prerelease --title $release_name --notes "Dev Release $release_name"
+          else
+            gh release create $release_name $tar_path --target ${{ steps.extract_hash.outputs.HASH }} --title $release_name --notes "Release $release_name"
+          fi
         shell: bash
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Ensure Taipy release is marked as latest
+      - name: Bump patch version
+        if: ${{ github.event.inputs.release_type == 'production' }}
         run: |
-           gh release edit ${{needs.fetch-versions.outputs.taipy_VERSION}} --latest
+          python tools/release/bump_patch_version.py ${{ github.event.inputs.target_package }}
+
+      - uses: stefanzweifel/git-auto-commit-action@v5
+        if: ${{ github.event.inputs.release_type == 'production' }}
+        with:
+          branch: "devops/bump-patch-version-for-${{ github.event.inputs.target_package }}-${{ github.run_id }}"
+          create_branch: "true"
+          file_pattern: "**/version.json"
+          commit_message: Bump patch versions for ${{ github.event.inputs.target_package }}
+
+      - name: Create pull request
+        if: ${{ github.event.inputs.release_type == 'production' }}
+        run: gh pr create -B "${{ needs.setup-versions.outputs.branch }}" -H "devops/bump-patch-version-for-${{ github.event.inputs.target_package }}-${{ github.run_id }}" --title "Bump patch version" --body "Created by GitHub action build-and-release-single-package"
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      # Ensure the latest 'taipy' production release, if there is one, is marked as *latest* no matter what
+      - name: Force latest 'taipy' production release
+        run: |
+          if [ "${{ needs.setup-versions.outputs.LATEST_TAIPY_VERSION }}" != "0.0.0" ]; then
+            gh release edit ${{ needs.setup-versions.outputs.LATEST_TAIPY_VERSION }} --latest
+          fi
         shell: bash
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
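
For reference, the "Validate target version" step added above accepts only a plain <major>.<minor>.<patch> string before any release work starts. A minimal Python sketch of the same rule, handy for checking a version locally before dispatching the workflow (illustrative only; the workflow itself runs the bash regex shown above, and tools/release/setup_versions.py may apply further validation):

    import re
    import sys

    # Same <M>.<m>.<patch> pattern enforced by the workflow's bash step.
    VERSION_RE = re.compile(r"^\d+\.\d+\.\d+$")

    def validate(version: str) -> None:
        if not VERSION_RE.fullmatch(version):
            print(f"Invalid version format: '{version}' - <M>.<m>.<patch> is mandatory.")
            sys.exit(1)
        print(f"Valid target version: {version}")

    if __name__ == "__main__":
        validate(sys.argv[1] if len(sys.argv) > 1 else "")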

+ 165 - 131
.github/workflows/build-and-release.yml

@@ -1,19 +1,27 @@
-name: Build all taipy packages and release them
+name: Build all taipy package releases
 
 on:
   workflow_dispatch:
     inputs:
-      internal_dep_on_pypi:
-        description: "Point taipy internal dependencies to Pypi? If false it will point to the github .tar.gz release file"
-        default: "false"
+      target_version:
+        description: "Package version"
         required: true
       release_type:
-        description: "The type of release to be made (dev or production)"
-        default: "dev"
+        description: "Release type"
         required: true
-      target_version:
-        description: "The version of the package to be released"
+        type: choice
+        options:
+          - dev
+          - production
+        default: "dev"
+      sub_packages_location:
+        description: "Dependencies location"
         required: true
+        type: choice
+        options:
+          - GitHub
+          - Pypi
+        default: "GitHub"
 
 env:
   NODE_OPTIONS: --max-old-space-size=4096
@@ -23,30 +31,46 @@ permissions:
   pull-requests: write
 
 jobs:
-  fetch-versions:
+  setup-versions:
     runs-on: ubuntu-latest
     outputs:
+        branch: ${{ steps.version-setup.outputs.branch }}
         common_VERSION: ${{ steps.version-setup.outputs.common_VERSION }}
         core_VERSION: ${{ steps.version-setup.outputs.core_VERSION }}
         gui_VERSION: ${{ steps.version-setup.outputs.gui_VERSION }}
         rest_VERSION: ${{ steps.version-setup.outputs.rest_VERSION }}
         templates_VERSION: ${{ steps.version-setup.outputs.templates_VERSION }}
-        VERSION: ${{ steps.version-setup.outputs.VERSION }}
-        NEW_VERSION: ${{ steps.version-setup.outputs.NEW_VERSION }}
+        taipy_VERSION: ${{ steps.version-setup.outputs.taipy_VERSION }}
+        LATEST_TAIPY_VERSION: ${{ steps.version-setup.outputs.LATEST_TAIPY_VERSION }}
     steps:
       - uses: actions/checkout@v4
-      - name: Extract branch name
-        shell: bash
-        run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
-        id: extract_branch
 
-      - name: Setup Version
-        id: version-setup
+      - name: Validate target version
         run: |
-          python tools/release/setup_version.py ALL ${{ github.event.inputs.release_type }} ${{ github.event.inputs.target_version }} ${{ steps.extract_branch.outputs.branch }} >> $GITHUB_OUTPUT
+          version="${{ github.event.inputs.target_version }}"
+          if [[ ! "$version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+            echo "❌ Invalid version format: '$version' - <M>.<m>.<patch> is mandatory."
+            exit 1
+          fi
+          echo "✅ Valid target version: $version"
 
-  build-and-release-taipy-packages:
-    needs: [fetch-versions]
+      - name: Install mandatory Python packages
+        run: |
+          python -m pip install --upgrade pip
+          pip install requests
+
+      - name: Setup versions
+        id: version-setup
+        run: |
+          python tools/release/setup_versions.py \
+          all \
+          -v ${{ github.event.inputs.target_version }} \
+          -t ${{ github.event.inputs.release_type }} \
+          -r ${{ github.repository }} | tee -a >>$GITHUB_OUTPUT
+          echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >>$GITHUB_OUTPUT
+
+  build-sub-packager-releases:
+    needs: setup-versions
     timeout-minutes: 20
     runs-on: ubuntu-latest
     strategy:
@@ -55,211 +79,221 @@ jobs:
       max-parallel: 1
     steps:
       - uses: actions/checkout@v4
-        with:
-          ssh-key: ${{secrets.DEPLOY_KEY}}
       - uses: actions/setup-python@v5
         with:
           python-version: 3.9
       - uses: actions/setup-node@v4
         with:
-          node-version: '20'
+          node-version: "20"
 
       - name: Extract commit hash
+        id: extract_hash
         shell: bash
         run: echo "HASH=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
-        id: extract_hash
 
-      - name: Set Build Variables
-        id: set-variables
+      - name: Install mandatory Python packages
+        run: |
+          python -m pip install --upgrade pip
+          pip install requests
+
+      - name: Update setup.requirements.txt
+        run: |
+          python tools/release/update_setup_requirements.py ${{ matrix.package }} \
+            ${{ needs.setup-versions.outputs.common_VERSION }} \
+            ${{ needs.setup-versions.outputs.core_VERSION }} \
+            ${{ needs.setup-versions.outputs.gui_VERSION }} \
+            ${{ needs.setup-versions.outputs.rest_VERSION }} \
+            ${{ needs.setup-versions.outputs.templates_VERSION }} \
+            -deps ${{ github.event.inputs.sub_packages_location }} \
+            -r ${{ github.repository }}
+
+      - name: Set package version for ${{ matrix.package }} ${{ github.event.inputs.target_version }}
+        id: package-version
+        shell: bash
         run: |
           if [ "${{ matrix.package }}" == "common" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.common_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/common" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.common_VERSION}}-common" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-common-${{needs.fetch-versions.outputs.common_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
+            echo "version=${{ needs.setup-versions.outputs.common_VERSION }}" >> $GITHUB_OUTPUT
           elif [ "${{ matrix.package }}" == "core" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.core_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/core" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.core_VERSION}}-core" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-core-${{needs.fetch-versions.outputs.core_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
+            echo "version=${{ needs.setup-versions.outputs.core_VERSION }}" >> $GITHUB_OUTPUT
           elif [ "${{ matrix.package }}" == "gui" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.gui_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/gui" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.gui_VERSION}}-gui" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-gui-${{needs.fetch-versions.outputs.gui_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
+            echo "version=${{ needs.setup-versions.outputs.gui_VERSION }}" >> $GITHUB_OUTPUT
           elif [ "${{ matrix.package }}" == "rest" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.rest_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/rest" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.rest_VERSION}}-rest" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-rest-${{needs.fetch-versions.outputs.rest_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
+            echo "version=${{ needs.setup-versions.outputs.rest_VERSION }}" >> $GITHUB_OUTPUT
           elif [ "${{ matrix.package }}" == "templates" ]; then
-            echo "package_version=${{needs.fetch-versions.outputs.templates_VERSION}}" >> $GITHUB_OUTPUT
-            echo "package_dir=./taipy/templates" >> $GITHUB_OUTPUT
-            echo "release_name=${{needs.fetch-versions.outputs.templates_VERSION}}-templates" >> $GITHUB_OUTPUT
-            echo "tar_path=./dist/${{ github.event.repository.name }}-templates-${{needs.fetch-versions.outputs.templates_VERSION}}.tar.gz" >> $GITHUB_OUTPUT
+            echo "version=${{ needs.setup-versions.outputs.templates_VERSION }}" >> $GITHUB_OUTPUT
+          elif [ "${{ matrix.package }}" == "taipy" ]; then
+            echo "version=${{ needs.setup-versions.outputs.taipy_VERSION }}" >> $GITHUB_OUTPUT
           fi
-        shell: bash
-
-      - name: Update setup.requirements.txt
-        run: |
-          python tools/release/update_setup_requirements.py taipy-${{ matrix.package }} \
-            ${{needs.fetch-versions.outputs.common_VERSION}} \
-            ${{needs.fetch-versions.outputs.core_VERSION}} \
-            ${{needs.fetch-versions.outputs.gui_VERSION}} \
-            ${{needs.fetch-versions.outputs.rest_VERSION}} \
-            ${{needs.fetch-versions.outputs.templates_VERSION}} \
-            ${{ github.event.inputs.internal_dep_on_pypi }}
-
-      - name: Copy tools
-        run: |
-          cp -r tools ${{ steps.set-variables.outputs.package_dir }}
 
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
           pip install build wheel pipenv mypy black isort
 
-      - name: Install GUI dependencies
-        if: matrix.package == 'gui'
+      - name: Build GUI front-end
+        if: ${{ matrix.package == 'gui' }}
         run: |
           pipenv install --dev
-
-      - name: Generate GUI pyi file
-        if: matrix.package == 'gui'
-        run: |
           pipenv run python tools/gui/generate_pyi.py
+          python tools/frontend/bundle_build.py gui
 
-      - name: Build frontends
-        if: matrix.package == 'gui'
+      - name: Archive the GUI front-end
+        if: ${{ matrix.package == 'gui' }}
         run: |
-          python tools/frontend/bundle_build.py
+          tar -czf gui-frontend.tar.gz taipy/gui/webapp
 
-      - name: Copy files from tools
-        run: |
-          cp -r tools/packages/taipy-${{matrix.package}}/. ${{ steps.set-variables.outputs.package_dir }}
+      - name: Upload front-end archive as an artifact
+        if: ${{ matrix.package == 'gui' }}
+        uses: actions/upload-artifact@v4
+        with:
+          name: gui-frontend
+          path: gui-frontend.tar.gz
 
-      - name: Build Package Structure
-        working-directory: ${{ steps.set-variables.outputs.package_dir }}
+      - name: Build package structure
         run: |
-          python tools/release/build_package_structure.py ${{ matrix.package }}
+          python tools/release/build_package_structure.py ${{ matrix.package }} ${{ steps.package-version.outputs.version }}
 
       - name: Build package
-        working-directory: ${{ steps.set-variables.outputs.package_dir }}
+        working-directory: "build_${{ matrix.package }}"
         run: |
           python -m build
           for file in ./dist/*; do mv "$file" "${file//_/-}"; done
 
       - name: Create tag and release
-        working-directory: ${{ steps.set-variables.outputs.package_dir }}
+        working-directory: "build_${{ matrix.package }}"
         run: |
-           if [ "${{ github.event.inputs.release_type }}" == "dev" ]; then
-            gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --prerelease --title ${{ steps.set-variables.outputs.release_name }} --notes "Release Draft ${{ steps.set-variables.outputs.release_name }}"
-           else
-            gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --title ${{ steps.set-variables.outputs.release_name }} --notes "Release ${{ steps.set-variables.outputs.release_name }}"
-           fi
+          package_suffix="-${{ matrix.package }}"
+          release_name="${{ steps.package-version.outputs.version }}$package_suffix"
+          tar_path="./dist/taipy$package_suffix-${{ steps.package-version.outputs.version }}.tar.gz"
+          if [ "${{ github.event.inputs.release_type }}" == "dev" ]; then
+            gh release create $release_name $tar_path --target ${{ steps.extract_hash.outputs.HASH }} --prerelease --title $release_name --notes "Dev Release $release_name"
+          else
+            gh release create $release_name $tar_path --target ${{ steps.extract_hash.outputs.HASH }} --title $release_name --notes "Release $release_name"
+          fi
         shell: bash
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
-  build-and-release-taipy:
+  build-taipy-release:
     runs-on: ubuntu-latest
-    needs: [build-and-release-taipy-packages, fetch-versions]
+    needs: [setup-versions, build-sub-packager-releases]
     timeout-minutes: 20
     steps:
       - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: 3.9
+      - uses: actions/setup-node@v4
         with:
-          ssh-key: ${{secrets.DEPLOY_KEY}}
+          node-version: "20"
+
       - name: Extract commit hash
+        id: extract_hash
         shell: bash
         run: echo "HASH=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
-        id: extract_hash
 
-      - name: Set Build Variables
-        id: set-variables
+      - name: Install mandatory Python packages
         run: |
-          echo "package_version=${{needs.fetch-versions.outputs.VERSION}}" >> $GITHUB_OUTPUT
-          echo "release_name=${{needs.fetch-versions.outputs.VERSION}}" >> $GITHUB_OUTPUT
-          echo "tar_path=./dist/${{ github.event.repository.name }}-${{needs.fetch-versions.outputs.VERSION}}.tar.gz" >> $GITHUB_OUTPUT
+          python -m pip install --upgrade pip
+          pip install requests
 
       - name: Update setup.requirements.txt
         run: |
           python tools/release/update_setup_requirements.py taipy \
-            ${{needs.fetch-versions.outputs.common_VERSION}} \
-            ${{needs.fetch-versions.outputs.core_VERSION}} \
-            ${{needs.fetch-versions.outputs.gui_VERSION}} \
-            ${{needs.fetch-versions.outputs.rest_VERSION}} \
-            ${{needs.fetch-versions.outputs.templates_VERSION}} \
-            ${{ github.event.inputs.internal_dep_on_pypi }}
+            ${{ needs.setup-versions.outputs.common_VERSION }} \
+            ${{ needs.setup-versions.outputs.core_VERSION }} \
+            ${{ needs.setup-versions.outputs.gui_VERSION }} \
+            ${{ needs.setup-versions.outputs.rest_VERSION }} \
+            ${{ needs.setup-versions.outputs.templates_VERSION }} \
+            -deps ${{ github.event.inputs.sub_packages_location }} \
+            -r ${{ github.repository }}
 
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install build wheel
+          pip install build wheel pipenv mypy black isort
 
-      - name: Backup setup.py
-        run: |
-          mv setup.py setup.old.py
+      - uses: actions/download-artifact@v4
+        with:
+          name: gui-frontend
+          path: .
+
+      - name: Retrieve the GUI front-end
+        run: tar -xzf gui-frontend.tar.gz
 
-      - name: Copy files from tools
+      - name: Build Taipy front-end
         run: |
-          cp -r tools/packages/taipy/. .
+          python tools/frontend/bundle_build.py taipy
 
-      - name: Build Frontend
+      - name: Build package structure
         run: |
-          python tools/frontend/bundle_build.py
+          python tools/release/build_package_structure.py taipy ${{ needs.setup-versions.outputs.taipy_VERSION }}
 
-      - name: Build Taipy package
+      - name: Build package
+        working-directory: "build_taipy"
         run: |
           python -m build
+          if compgen -G "./dist/*_*" > /dev/null; then
+            for file in ./dist/*_*; do mv "$file" "${file//_/-}"; done
+          fi
 
       - name: Create tag and release Taipy
+        working-directory: "build_taipy"
         run: |
+          release_name="${{ needs.setup-versions.outputs.taipy_VERSION }}"
+          tar_path="./dist/taipy-$release_name.tar.gz"
           if [ "${{ github.event.inputs.release_type }}" == "dev" ]; then
-            gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --prerelease --title ${{ steps.set-variables.outputs.release_name }} --notes "Release Draft ${{ steps.set-variables.outputs.release_name }}"
+            gh release create $release_name $tar_path --target ${{ steps.extract_hash.outputs.HASH }} --prerelease --title $release_name --notes "Dev Release $release_name"
           else
-            gh release create ${{ steps.set-variables.outputs.release_name }} ${{ steps.set-variables.outputs.tar_path }} --target ${{ steps.extract_hash.outputs.HASH }} --title ${{ steps.set-variables.outputs.release_name }} --notes "Release ${{ steps.set-variables.outputs.release_name }}"
+            gh release create $release_name $tar_path --target ${{ steps.extract_hash.outputs.HASH }} --title $release_name --notes "Release $release_name"
           fi
         shell: bash
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Download packages
+        working-directory: "build_taipy"
         run: |
-          gh release download ${{ needs.fetch-versions.outputs.common_VERSION }}-common --skip-existing --dir dist
-          gh release download ${{ needs.fetch-versions.outputs.core_VERSION }}-core --skip-existing --dir dist
-          gh release download ${{ needs.fetch-versions.outputs.gui_VERSION }}-gui --skip-existing --dir dist
-          gh release download ${{ needs.fetch-versions.outputs.rest_VERSION }}-rest --skip-existing --dir dist
-          gh release download ${{ needs.fetch-versions.outputs.templates_VERSION }}-templates --skip-existing --dir dist
+          gh release download ${{ needs.setup-versions.outputs.common_VERSION }}-common --skip-existing --dir dist
+          gh release download ${{ needs.setup-versions.outputs.core_VERSION }}-core --skip-existing --dir dist
+          gh release download ${{ needs.setup-versions.outputs.gui_VERSION }}-gui --skip-existing --dir dist
+          gh release download ${{ needs.setup-versions.outputs.rest_VERSION }}-rest --skip-existing --dir dist
+          gh release download ${{ needs.setup-versions.outputs.templates_VERSION }}-templates --skip-existing --dir dist
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Bundle all packages in main release tag
+        working-directory: "build_taipy"
         run: |
-          find dist -type f -print0 | xargs -r0 gh release upload ${{ needs.fetch-versions.outputs.VERSION }} --clobber
+          find dist -type f -print0 | xargs -r0 gh release upload ${{ needs.setup-versions.outputs.taipy_VERSION }} --clobber
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Bump Version
-        if: github.event.inputs.release_type == 'dev'
-        id: bump-version
+      - name: Bump patch versions
+        if: ${{ github.event.inputs.release_type == 'production' }}
         run: |
-          python tools/release/bump_version.py
+          python tools/release/bump_patch_version.py all
 
       - uses: stefanzweifel/git-auto-commit-action@v5
-        if: github.event.inputs.release_type == 'dev'
+        if: ${{ github.event.inputs.release_type == 'production' }}
         with:
-          branch: "feature/update-dev-version-${{ github.run_id }}"
-          create_branch: 'true'
-          file_pattern: '**/version.json'
-          commit_message: Update version to ${{ needs.fetch-versions.outputs.NEW_VERSION }}
-
-      - name: create pull request
-        if: github.event.inputs.release_type == 'dev'
-        run: gh pr create -B develop -H "feature/update-dev-version-${{ github.run_id }}" --title 'Update Dev Version' --body 'Created by Github action'
+          branch: "devops/bump-patch-version-${{ github.run_id }}"
+          create_branch: "true"
+          file_pattern: "**/version.json"
+          commit_message: Bump patch versions for ${{ needs.setup-versions.outputs.taipy_VERSION }}
+
+      - name: Create pull request
+        if: ${{ github.event.inputs.release_type == 'production' }}
+        run: gh pr create -B "${{ needs.setup-versions.outputs.branch }}" -H "devops/bump-patch-version-${{ github.run_id }}" --title "Bump patch version" --body "Created by GitHub action build-and-release"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Reset changes
+      # Ensure the latest 'taipy' production release, if there is one, is marked as *latest* no matter what
+      - name: Force latest 'taipy' production release
         run: |
-          git reset --hard HEAD
-          git clean -fdx
+          if [ "${{ needs.setup-versions.outputs.LATEST_TAIPY_VERSION }}" != "0.0.0" ]; then
+            gh release edit ${{ needs.setup-versions.outputs.LATEST_TAIPY_VERSION }} --latest
+          fi
+        shell: bash
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
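
Both reworked workflows now derive release names and tarball paths the same way: sub-packages are tagged <version>-<package>, the umbrella taipy package is tagged <version>, and the uploaded archive is ./dist/taipy[-<package>]-<version>.tar.gz. A minimal Python sketch of that convention (illustrative only; the version "4.1.0" below is hypothetical, and the workflows compute these strings in bash in the "Create tag and release" steps above):

    def release_artifacts(package: str, version: str) -> tuple[str, str]:
        """Mirror the release naming used by the workflows above."""
        suffix = "" if package == "taipy" else f"-{package}"
        release_name = f"{version}{suffix}"                   # e.g. "4.1.0-gui" or "4.1.0"
        tar_path = f"./dist/taipy{suffix}-{version}.tar.gz"   # archive uploaded to the release
        return release_name, tar_path

    # Hypothetical example: ('4.1.0-gui', './dist/taipy-gui-4.1.0.tar.gz')
    print(release_artifacts("gui", "4.1.0"))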

+ 1 - 0
Pipfile

@@ -39,6 +39,7 @@ charset-normalizer = "==3.3.2"
 numpy = "<2.0.0"
 
 [dev-packages]
+fastparquet = "*"
 freezegun = "*"
 ipython = "*"
 ipykernel = "*"

+ 54 - 0
doc/gui/examples/charts/advanced_animation.py

@@ -0,0 +1,54 @@
+# Copyright 2021-2025 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+# -----------------------------------------------------------------------------------------
+# To execute this script, make sure that the taipy-gui package is installed in your
+# Python environment and run:
+#     python <script>
+# -----------------------------------------------------------------------------------------
+from math import ceil, cos
+
+from taipy.gui import Gui
+
+# Available waveforms to choose from
+waveforms = ["Sine", "Square"]
+# The initially selected waveform
+waveform = waveforms[0]
+
+# Values for the x axis
+x_range = range(100)
+# Data for the 'Sine' waveform
+cos_data = [cos(i / 6) for i in x_range]
+# Data for the 'Square' waveform
+square_data = [1 if ceil(i / 24) % 2 == 0 else -1 for i in x_range]
+
+# Dataset used by the chart
+data = {
+    "x": x_range,
+    "y": cos_data,
+}
+
+animation_data = None
+
+
+# Triggered when the selected waveform changes
+def change_data(state):
+    # Animate by setting the 'y' values to the selected waveform's
+    state.animation_data = {"y": cos_data if state.waveform == waveforms[0] else square_data}
+
+
+page = """
+<|{waveform}|toggle|lov={waveforms}|on_change=change_data|>
+<|{data}|chart|mode=lines+markers|x=x|y=y|animation_data={animation_data}|>
+"""
+
+
+if __name__ == "__main__":
+    Gui(page).run(title="Chart - Advanced - Animation")

+ 4 - 4
frontend/taipy-gui/base/src/app.ts

@@ -40,7 +40,7 @@ export class TaipyApp {
     _cookieHandler: CookieHandler | undefined;
     variableData: DataManager | undefined;
     functionData: DataManager | undefined;
-    appId: string;
+    guiAddr: string;
     clientId: string;
     context: string;
     metadata: Record<string, unknown>;
@@ -63,7 +63,7 @@ export class TaipyApp {
         this.clientId = "";
         this.context = "";
         this.metadata = {};
-        this.appId = "";
+        this.guiAddr = "";
         this.routes = undefined;
         this.path = path;
         this.socket = socket;
@@ -168,7 +168,7 @@ export class TaipyApp {
     init() {
         this.clientId = "";
         this.context = "";
-        this.appId = "";
+        this.guiAddr = "";
         this.routes = undefined;
         const id = getLocalStorageValue(TAIPY_CLIENT_ID, "");
         this.sendWsMessage("ID", TAIPY_CLIENT_ID, id);
@@ -180,7 +180,7 @@ export class TaipyApp {
     }
 
     initApp() {
-        this.sendWsMessage("AID", "connect", "");
+        this.sendWsMessage("GA", "connect", "");
         this.sendWsMessage("GR", "", "");
     }
 

+ 2 - 2
frontend/taipy-gui/base/src/packaging/taipy-gui-base.d.ts

@@ -73,7 +73,7 @@ export type WsMessageType =
     | "ACK"
     | "GMC"
     | "GDT"
-    | "AID"
+    | "GA"
     | "GR"
     | "FV"
     | "BC"
@@ -125,7 +125,7 @@ export declare class TaipyApp {
     _cookieHandler: CookieHandler | undefined;
     variableData: DataManager | undefined;
     functionData: DataManager | undefined;
-    appId: string;
+    guiAddr: string;
     clientId: string;
     context: string;
     metadata: Record<string, unknown>;

+ 2 - 2
frontend/taipy-gui/base/src/socket.ts

@@ -5,7 +5,7 @@ import { TaipyApp } from "./app";
 export const initSocket = (socket: Socket, taipyApp: TaipyApp) => {
     socket.on("connect", () => {
         taipyApp.onWsMessageEvent("connect", null);
-        if (taipyApp.clientId === "" || taipyApp.appId === "") {
+        if (taipyApp.clientId === "" || taipyApp.guiAddr === "") {
             taipyApp.init();
         }
     });
@@ -13,7 +13,7 @@ export const initSocket = (socket: Socket, taipyApp: TaipyApp) => {
     socket.io.on("reconnect", () => {
         taipyApp.onWsMessageEvent("reconnect", null);
         console.log("WebSocket reconnected");
-        taipyApp.sendWsMessage("AID", "reconnect", taipyApp.appId);
+        taipyApp.sendWsMessage("GA", "reconnect", taipyApp.guiAddr);
     });
     // try to reconnect on connect_error
     socket.on("connect_error", (err) => {

+ 5 - 5
frontend/taipy-gui/base/src/wsAdapter.ts

@@ -25,8 +25,8 @@ export class TaipyWsAdapter extends WsAdapter {
     initWsMessageTypes: string[];
     constructor() {
         super();
-        this.supportedMessageTypes = ["MU", "ID", "GMC", "GDT", "AID", "GR", "AL", "ACK"];
-        this.initWsMessageTypes = ["ID", "AID", "GMC"];
+        this.supportedMessageTypes = ["MU", "ID", "GMC", "GDT", "GA", "GR", "AL", "ACK"];
+        this.initWsMessageTypes = ["ID", "GA", "GMC"];
     }
     handleWsMessage(message: WsMessage, taipyApp: TaipyApp): boolean {
         if (message.type) {
@@ -86,13 +86,13 @@ export class TaipyWsAdapter extends WsAdapter {
                     taipyApp.functionData = new DataManager(functionData);
                     taipyApp.onInitEvent();
                 }
-            } else if (message.type === "AID") {
+            } else if (message.type === "GA") {
                 const payload = message.payload as Record<string, unknown>;
                 if (payload.name === "reconnect") {
                     taipyApp.init();
                     return true;
                 }
-                taipyApp.appId = payload.id as string;
+                taipyApp.guiAddr = payload.id as string;
             } else if (message.type === "GR") {
                 const payload = message.payload as [string, string][];
                 taipyApp.routes = payload;
@@ -114,7 +114,7 @@ export class TaipyWsAdapter extends WsAdapter {
         if (
             this.initWsMessageTypes.includes(message.type) &&
             taipyApp.clientId !== "" &&
-            taipyApp.appId !== "" &&
+            taipyApp.guiAddr !== "" &&
             taipyApp.context !== "" &&
             taipyApp.routes !== undefined
         ) {

+ 148 - 8
frontend/taipy-gui/src/components/Taipy/Chart.tsx

@@ -17,17 +17,23 @@ import { useTheme } from "@mui/material";
 import Box from "@mui/material/Box";
 import Skeleton from "@mui/material/Skeleton";
 import Tooltip from "@mui/material/Tooltip";
+import isEqual from "lodash/isEqual";
 import merge from "lodash/merge";
 import { nanoid } from "nanoid";
 import {
+    AnimationOpts,
     Config,
     Data,
+    Frame,
     Layout,
     ModeBarButtonAny,
     PlotDatum,
+    PlotData,
+    PlotlyHTMLElement,
     PlotMarker,
     PlotRelayoutEvent,
     PlotSelectionEvent,
+    Root,
     ScatterLine,
 } from "plotly.js";
 import { Figure } from "react-plotly.js";
@@ -53,6 +59,14 @@ import { getArrayValue, getUpdateVar, TaipyActiveProps, TaipyChangeProps } from
 
 const Plot = lazy(() => import("react-plotly.js"));
 
+interface PlotlyObject {
+    animate: (
+        root: Root,
+        frameOrGroupNameOrFrameList?: string | string[] | Partial<Frame> | Array<Partial<Frame>>,
+        opts?: Partial<AnimationOpts>
+    ) => Promise<void>;
+}
+
 interface ChartProp extends TaipyActiveProps, TaipyChangeProps {
     title?: string;
     defaultTitle?: string;
@@ -61,6 +75,7 @@ interface ChartProp extends TaipyActiveProps, TaipyChangeProps {
     defaultConfig: string;
     config?: string;
     data?: Record<string, TraceValueType>;
+    animationData?: Record<string, TraceValueType>;
     //data${number}?: Record<string, TraceValueType>;
     defaultLayout?: string;
     layout?: string;
@@ -187,15 +202,28 @@ const selectedPropRe = /selected(\d+)/;
 
 const MARKER_TO_COL = ["color", "size", "symbol", "opacity", "colors"];
 
+const DEFAULT_ANIMATION_SETTINGS: Partial<AnimationOpts> = {
+    transition: {
+        duration: 500,
+        easing: "cubic-in-out",
+    },
+    frame: {
+        duration: 500,
+    },
+    mode: "immediate",
+};
+
 const isOnClick = (types: string[]) => (types?.length ? types.every((t) => t === "pie") : false);
 
 interface Axis {
     p2c: () => number;
     p2d: (a: number) => number;
 }
+
 interface PlotlyMap {
     _subplot?: { xaxis: Axis; yaxis: Axis };
 }
+
 interface PlotlyDiv extends HTMLDivElement {
     _fullLayout?: {
         map?: PlotlyMap;
@@ -206,6 +234,13 @@ interface PlotlyDiv extends HTMLDivElement {
     };
 }
 
+interface ExtendedPlotData extends PlotData {
+    meta?: {
+        xAxisName?: string;
+        yAxisName?: string;
+    };
+}
+
 interface WithPointNumbers {
     pointNumbers: number[];
 }
@@ -308,15 +343,23 @@ const Chart = (props: ChartProp) => {
         onRangeChange,
         propagate = true,
         onClick,
+        animationData,
     } = props;
     const dispatch = useDispatch();
     const [selected, setSelected] = useState<number[][]>([]);
-    const plotRef = useRef<HTMLDivElement>(null);
+    const plotRef = useRef<HTMLDivElement | null>(null);
+    const plotlyRef = useRef<PlotlyObject | null>(null);
     const [dataKeys, setDataKeys] = useState<string[]>([]);
-    const lastDataPl = useRef<Data[]>([]);
+
+    // animation
+    const [toFrame, setToFrame] = useState<Partial<Frame>>({
+        data: [],
+        traces: [],
+    });
+
+    const lastDataPl = useRef<ExtendedPlotData[]>([]);
     const theme = useTheme();
     const module = useModule();
-
     const className = useClassNames(props.libClassName, props.dynamicClassName, props.className);
     const active = useDynamicProperty(props.active, props.defaultActive, true);
     const render = useDynamicProperty(props.render, props.defaultRender, true);
@@ -489,6 +532,31 @@ const Chart = (props: ChartProp) => {
         props.figure,
     ]);
 
+    useEffect(() => {
+        if (animationData?.__taipy_refresh) {
+            const animationDataVar = getUpdateVar(updateVars || "", "animationData");
+            animationDataVar &&
+                dispatch(createRequestChartUpdateAction(animationDataVar, id, module, [], "", undefined));
+        }
+    }, [animationData?.__taipy_refresh, dispatch, id, module, updateVars]);
+
+    const runAnimation = useCallback(() => {
+        return (
+            plotRef.current &&
+            plotlyRef.current &&
+            toFrame.data &&
+            (toFrame.traces && toFrame.traces.length > 0 ? true : null) &&
+            plotlyRef.current.animate(
+                plotRef.current as unknown as PlotlyHTMLElement,
+                {
+                    ...toFrame,
+                    layout: layout,
+                },
+                DEFAULT_ANIMATION_SETTINGS
+            )
+        );
+    }, [layout, toFrame]);
+
     const style = useMemo(
         () =>
             height === undefined
@@ -589,6 +657,10 @@ const Chart = (props: ChartProp) => {
             ret.xaxis = config.xaxis[idx];
             ret.yaxis = config.yaxis[idx];
             ret.hovertext = getValue(datum, config.labels, idx, true);
+            ret.meta = {
+                xAxisName: config.traces[idx][0],
+                yAxisName: config.traces[idx][1],
+            };
             const selPoints = getArrayValue(selected, idx, []);
             if (selPoints?.length) {
                 ret.selectedpoints = selPoints;
@@ -603,10 +675,10 @@ const Chart = (props: ChartProp) => {
             if (idx == 0) {
                 baseDataPl = ret;
             }
-            return ret as Data;
+            return ret;
         });
         if (changed) {
-            lastDataPl.current = newDataPl;
+            lastDataPl.current = newDataPl as ExtendedPlotData[];
         }
         return lastDataPl.current;
     }, [props.figure, selected, data, additionalDatas, config, dataKeys]);
@@ -696,7 +768,7 @@ const Chart = (props: ChartProp) => {
                 (evt?.currentTarget as PlotlyDiv)?._fullLayout?.geo ||
                 (evt?.currentTarget as PlotlyDiv)?._fullLayout?.mapbox;
             const xaxis = map ? map._subplot?.xaxis : (evt?.currentTarget as PlotlyDiv)?._fullLayout?.xaxis;
-            const yaxis = map ? map._subplot?.xaxis : (evt?.currentTarget as PlotlyDiv)?._fullLayout?.yaxis;
+            const yaxis = map ? map._subplot?.yaxis : (evt?.currentTarget as PlotlyDiv)?._fullLayout?.yaxis;
             if (!xaxis || !yaxis) {
                 console.info("clickHandler: Plotly div does not have an xaxis object", evt);
                 return;
@@ -725,8 +797,14 @@ const Chart = (props: ChartProp) => {
     const onInitialized = useCallback(
         (figure: Readonly<Figure>, graphDiv: Readonly<HTMLElement>) => {
             onClick && graphDiv.addEventListener("click", clickHandler);
+            plotRef.current = graphDiv as HTMLDivElement;
+            plotlyRef.current = window.Plotly as unknown as PlotlyObject;
+
+            if (animationData) {
+                runAnimation()?.catch(console.error);
+            }
         },
-        [onClick, clickHandler]
+        [onClick, clickHandler, animationData, runAnimation]
     );
 
     const getRealIndex = useCallback(
@@ -792,9 +870,71 @@ const Chart = (props: ChartProp) => {
         [getRealIndex, dispatch, updateVars, propagate, props.onChange, config.traces.length, module]
     );
 
+    useEffect(() => {
+        if (!dataPl.length || !animationData || isDataRefresh(animationData)) {
+            return;
+        }
+        const animationKeys = Object.keys(animationData) as Array<keyof ExtendedPlotData>;
+
+        let found = false;
+        const toFramesData = dataPl
+            .map((trace) => {
+                const traceAnimationKeys = animationKeys.filter(
+                    (key) => trace.hasOwnProperty(key) && Array.isArray(trace[key]) && Array.isArray(animationData[key])
+                );
+                if (!traceAnimationKeys.length) {
+                    return undefined;
+                }
+                return traceAnimationKeys.reduce(
+                    (tr, key) => {
+                        if (!isEqual(trace[key], animationData[key])) {
+                            found = true;
+                            tr[key] = animationData[key];
+                        }
+                        return tr;
+                    },
+                    { ...trace } as Record<string, unknown>
+                ) as unknown as ExtendedPlotData;
+            })
+            .filter((t) => t);
+        if (!found) {
+            return;
+        }
+
+        if (toFramesData.length) {
+            setToFrame({
+                data: toFramesData as Data[],
+                traces: dataPl.map((_, idx) => idx),
+            });
+        }
+    }, [dataPl, animationData]);
+
+    useEffect(() => {
+        if (!plotRef.current || !toFrame.data?.length) {
+            return;
+        }
+
+        const plotElement = plotRef.current as unknown as PlotlyHTMLElement;
+        if (!plotElement?.data) {
+            return;
+        }
+
+        const timer = setTimeout(() => {
+            if (plotRef.current) {
+                runAnimation()?.catch(console.error);
+            }
+        }, 100);
+
+        return () => {
+            if (timer) {
+                clearTimeout(timer);
+            }
+        };
+    }, [toFrame.data?.length, runAnimation]);
+
     return render ? (
         <Tooltip title={hover || ""}>
-            <Box id={id} className={`${className} ${getComponentClassName(props.children)}`} ref={plotRef}>
+            <Box id={props.id} className={`${className} ${getComponentClassName(props.children)}`} ref={plotRef}>
                 <Suspense fallback={<Skeleton key="skeleton" sx={skelStyle} />}>
                     {Array.isArray(props.figure) && props.figure.length && props.figure[0].data !== undefined ? (
                         <Plot

+ 1 - 1
frontend/taipy-gui/src/components/Taipy/Expandable.spec.tsx

@@ -24,7 +24,7 @@ describe("Expandable Component", () => {
     it("renders", async () => {
         const { getByText } = render(<Expandable title="foo">bar</Expandable>);
         const elt = getByText("foo");
-        expect(elt.tagName).toBe("SPAN");
+        expect(elt.tagName).toBe("DIV");
     });
     it("displays the right info for string", async () => {
         const { getByText } = render(

+ 1 - 0
frontend/taipy-gui/src/components/Taipy/Input.tsx

@@ -375,6 +375,7 @@ const Input = (props: TaipyInputProps) => {
                     onKeyDown={handleAction}
                     multiline={multiline}
                     minRows={linesShown}
+                    maxRows={linesShown}
                     size={size}
                 />
                 {props.children}

+ 4 - 1
frontend/taipy-gui/src/components/Taipy/Metric.tsx

@@ -190,8 +190,11 @@ const Metric = (props: MetricProps) => {
         if (template) {
             layout.template = template;
         }
+
         if (props.title) {
-            layout.title = { text: props.title };
+            layout.title = {
+                text: props.title
+            }
         }
         return layout as Partial<Layout>;
     }, [

+ 1 - 1
frontend/taipy-gui/src/components/Taipy/tableUtils.tsx

@@ -272,7 +272,7 @@ const formatValue = (
             return getNumberString(val as number, col.format, formatConf);
         default:
             return val
-                ? lineBreak && (col.lineBreak === undefined || col.lineBreak)
+                ? lineBreak && (col.lineBreak === undefined || col.lineBreak) && typeof val === "string"
                     ? (val as string).split("\n").map((p, i) =>
                           i == 0 ? (
                               p

+ 3 - 2
frontend/taipy-gui/src/context/taipyReducers.spec.ts

@@ -1142,7 +1142,7 @@ describe("initializeWebSocket function", () => {
                 mockSocket,
                 "ID",
                 "TaipyClientId",
-                { "id": "mockId" },
+                { id: "mockId" },
                 "mockId",
                 undefined,
                 false,
@@ -1167,8 +1167,9 @@ describe("initializeWebSocket function", () => {
         initializeWebSocket(mockSocket, mockDispatch);
         const connectErrorCallback = mockSocket.on.mock.calls.find((call) => call[0] === "connect_error")?.[1];
         expect(connectErrorCallback).toBeDefined();
+
         if (connectErrorCallback) {
-            connectErrorCallback();
+            connectErrorCallback(new Error("connect_error"));
             jest.advanceTimersByTime(500);
             expect(mockSocket.connect).toHaveBeenCalled();
         }

+ 15 - 12
frontend/taipy-gui/src/context/taipyReducers.ts

@@ -24,7 +24,7 @@ import { getBaseURL, TIMEZONE_CLIENT } from "../utils";
 import { parseData } from "../utils/dataFormat";
 import { MenuProps } from "../utils/lov";
 import { changeFavicon, getLocalStorageValue, IdMessage, storeClientId } from "./utils";
-import { lightenPayload, sendWsMessage, TAIPY_APP_ID, TAIPY_CLIENT_ID, WsMessage } from "./wsUtils";
+import { lightenPayload, sendWsMessage, TAIPY_GUI_ADDR, TAIPY_CLIENT_ID, WsMessage } from "./wsUtils";
 
 export enum Types {
     SocketConnected = "SOCKET_CONNECTED",
@@ -242,8 +242,8 @@ export const messageToAction = (message: WsMessage) => {
             changeFavicon((message.payload as Record<string, string>)?.value);
         } else if (message.type == "BC") {
             stackBroadcast((message as NamePayload).name, (message as NamePayload).payload.value);
-        } else if (message.type == "AID") {
-            checkAppId((message.payload as Record<string, string>).id);
+        } else if (message.type == "GA") {
+            checkGuiAddr((message.payload as Record<string, string>).id);
         }
     }
     return {} as TaipyBaseAction;
@@ -288,16 +288,19 @@ const initializeBroadcastManagement = (dispatch: Dispatch<TaipyBaseAction>) => {
     }, broadcast_timeout);
 };
 
-// App id
-const checkAppId = (appId: string) => {
-    if (!appId) {
+// Gui Address
+const checkGuiAddr = (guiAddr: string) => {
+    if (!guiAddr) {
         return;
     }
-    appId = `${appId}`;
-    const localAppId = getLocalStorageValue(TAIPY_APP_ID, "");
-    if (!localAppId || localAppId !== appId) {
-        localStorage && localStorage.setItem(TAIPY_APP_ID, appId);
-        localAppId && window.location.assign(getBaseURL());
+    guiAddr = `${guiAddr}`;
+    const localGuiAddr = getLocalStorageValue(TAIPY_GUI_ADDR, "");
+    if (!localGuiAddr || localGuiAddr !== guiAddr) {
+        localStorage && localStorage.setItem(TAIPY_GUI_ADDR, guiAddr);
+        if (localGuiAddr) {
+            console.info("Taipy GUI address changed, reloading the page");
+            window.location.assign(getBaseURL());
+        }
     }
 };
 
@@ -311,7 +314,7 @@ export const initializeWebSocket = (socket: Socket | undefined, dispatch: Dispat
             const id = getLocalStorageValue(TAIPY_CLIENT_ID, "");
             const payload: Record<string, unknown> = { id };
             if (lastReasonServer) {
-                payload["app_id"] = Number(getLocalStorageValue(TAIPY_APP_ID, ""));
+                payload["gui_addr"] = Number(getLocalStorageValue(TAIPY_GUI_ADDR, ""));
             }
             sendWsMessage(socket, "ID", TAIPY_CLIENT_ID, payload, id, undefined, false, () => {
                 dispatch({ type: Types.SocketConnected });

+ 2 - 2
frontend/taipy-gui/src/context/wsUtils.ts

@@ -2,7 +2,7 @@ import { Socket } from "socket.io-client";
 import { nanoid } from 'nanoid'
 
 export const TAIPY_CLIENT_ID = "TaipyClientId";
-export const TAIPY_APP_ID = "TaipyAppId";
+export const TAIPY_GUI_ADDR = "TaipyGuiAddr";
 
 export type WsMessageType =
     | "A"
@@ -20,7 +20,7 @@ export type WsMessageType =
     | "ACK"
     | "GMC"
     | "GDT"
-    | "AID"
+    | "GA"
     | "GR"
     | "FV"
     | "BC"

+ 96 - 96
frontend/taipy-gui/webpack.config.js

@@ -16,31 +16,31 @@ const path = require("path");
 const webpack = require("webpack");
 const CopyWebpackPlugin = require("copy-webpack-plugin");
 const HtmlWebpackPlugin = require("html-webpack-plugin");
-const AddAssetHtmlPlugin = require('add-asset-html-webpack-plugin');
+const AddAssetHtmlPlugin = require("add-asset-html-webpack-plugin");
 const ESLintPlugin = require("eslint-webpack-plugin");
-const GenerateJsonPlugin = require('generate-json-webpack-plugin');
+const GenerateJsonPlugin = require("generate-json-webpack-plugin");
 
 const resolveApp = relativePath => path.resolve(__dirname, relativePath);
 
-const reactBundle = "taipy-gui-deps"
-const taipyBundle = "taipy-gui"
+const reactBundle = "taipy-gui-deps";
+const taipyBundle = "taipy-gui";
 
-const reactBundleName = "TaipyGuiDependencies"
-const taipyBundleName = "TaipyGui"
-const taipyGuiBaseBundleName = "TaipyGuiBase"
+const reactBundleName = "TaipyGuiDependencies";
+const taipyBundleName = "TaipyGui";
+const taipyGuiBaseBundleName = "TaipyGuiBase";
 
 const basePath = "../../taipy/gui/webapp";
 const webAppPath = resolveApp(basePath);
 const reactManifestPath = resolveApp(basePath + "/" + reactBundle + "-manifest.json");
-const reactDllPath = resolveApp(basePath + "/" + reactBundle + ".dll.js")
-const taipyDllPath = resolveApp(basePath + "/" + taipyBundle + ".js")
+const reactDllPath = resolveApp(basePath + "/" + reactBundle + ".dll.js");
+const taipyDllPath = resolveApp(basePath + "/" + taipyBundle + ".js");
 const taipyGuiBaseExportPath = resolveApp(basePath + "/taipy-gui-base-export");
 
 module.exports = (env, options) => {
     const envVariables = {
-        frontend_version: require(resolveApp('package.json')).version,
+        frontend_version: require(resolveApp("package.json")).version,
         frontend_build_date: new Date().toISOString(),
-        frontend_build_mode: options.mode
+        frontend_build_mode: options.mode,
     };
 
     return [{
@@ -71,7 +71,7 @@ module.exports = (env, options) => {
                 path: webAppPath,
                 library: {
                     name: taipyBundleName,
-                    type: "umd"
+                    type: "umd",
                 },
                 publicPath: "",
             },
@@ -96,7 +96,7 @@ module.exports = (env, options) => {
                             fullySpecified: false,
                         },
                     },
-                ]
+                ],
             },
             plugins: [
                 new ESLintPlugin({
@@ -106,9 +106,9 @@ module.exports = (env, options) => {
                 }),
                 new webpack.DllReferencePlugin({
                     name: reactBundleName,
-                    manifest: reactManifestPath
-                })
-            ]
+                    manifest: reactManifestPath,
+                }),
+            ],
         },
         {
             mode: options.mode, //'development', //'production',
@@ -120,7 +120,7 @@ module.exports = (env, options) => {
                 publicPath: "",
             },
             dependencies: [taipyBundleName, reactBundleName],
-            externals: {"taipy-gui": taipyBundleName},
+            externals: { "taipy-gui": taipyBundleName },
 
             // Enable sourcemaps for debugging webpack's output.
             devtool: options.mode === "development" && "inline-source-map",
@@ -136,20 +136,20 @@ module.exports = (env, options) => {
                         use: "ts-loader",
                         exclude: /node_modules/,
                     },
-                ]
+                ],
             },
 
             plugins: [
                 new CopyWebpackPlugin({
                     patterns: [
                         { from: "../public", filter: (name) => !name.endsWith(".html") },
-                        { from: "../packaging", filter: (name) => !name.includes(".gen.") }
+                        { from: "../packaging", filter: (name) => !name.includes(".gen.") },
                     ],
                 }),
                 new HtmlWebpackPlugin({
                     template: "../public/index.html",
                     hash: true,
-                    ...envVariables
+                    ...envVariables,
                 }),
                 new GenerateJsonPlugin("taipy.status.json", envVariables),
                 new ESLintPlugin({
@@ -159,94 +159,94 @@ module.exports = (env, options) => {
                 }),
                 new webpack.DllReferencePlugin({
                     name: reactBundleName,
-                    manifest: reactManifestPath
+                    manifest: reactManifestPath,
                 }),
                 new AddAssetHtmlPlugin([{
                     filepath: reactDllPath,
-                    hash: true
-                },{
+                    hash: true,
+                }, {
                     filepath: taipyDllPath,
-                    hash: true
+                    hash: true,
                 }]),
             ],
-    },
-    {
-        mode: options.mode,
-        target: "web",
-        entry: {
-            "default": "./base/src/index.ts",
         },
-        output: {
-            filename: (arg) => {
-                if (arg.chunk.name === "default") {
-                    return "taipy-gui-base.js";
-                }
-                return "[name].taipy-gui-base.js";
-            },
-            chunkFilename: "[name].taipy-gui-base.js",
-            path: webAppPath,
-            globalObject: "this",
-            library: {
-                name: taipyGuiBaseBundleName,
-                type: "umd",
+        {
+            mode: options.mode,
+            target: "web",
+            entry: {
+                "default": "./base/src/index.ts",
             },
-        },
-        optimization: {
-            splitChunks: {
-                chunks: 'all',
-                name: "shared",
+            output: {
+                filename: (arg) => {
+                    if (arg.chunk.name === "default") {
+                        return "taipy-gui-base.js";
+                    }
+                    return "[name].taipy-gui-base.js";
+                },
+                chunkFilename: "[name].taipy-gui-base.js",
+                path: webAppPath,
+                globalObject: "this",
+                library: {
+                    name: taipyGuiBaseBundleName,
+                    type: "umd",
+                },
             },
-        },
-        module: {
-            rules: [
-                {
-                    test: /\.tsx?$/,
-                    use: "ts-loader",
-                    exclude: /node_modules/,
+            optimization: {
+                splitChunks: {
+                    chunks: "all",
+                    name: "shared",
                 },
-            ],
-        },
-        resolve: {
-            extensions: [".tsx", ".ts", ".js", ".tsx"],
-        },
-        // externals: {
-        //     "socket.io-client": {
-        //         commonjs: "socket.io-client",
-        //         commonjs2: "socket.io-client",
-        //         amd: "socket.io-client",
-        //         root: "_",
-        //     },
-        // },
-    },
-    {
-        entry: "./base/src/exports.ts",
-        output: {
-            filename: "taipy-gui-base.js",
-            path: taipyGuiBaseExportPath,
-            library: {
-                name: taipyGuiBaseBundleName,
-                type: "umd",
             },
-            publicPath: "",
+            module: {
+                rules: [
+                    {
+                        test: /\.tsx?$/,
+                        use: "ts-loader",
+                        exclude: /node_modules/,
+                    },
+                ],
+            },
+            resolve: {
+                extensions: [".tsx", ".ts", ".js", ".tsx"],
+            },
+            // externals: {
+            //     "socket.io-client": {
+            //         commonjs: "socket.io-client",
+            //         commonjs2: "socket.io-client",
+            //         amd: "socket.io-client",
+            //         root: "_",
+            //     },
+            // },
         },
-        module: {
-            rules: [
-                {
-                    test: /\.tsx?$/,
-                    use: "ts-loader",
-                    exclude: /node_modules/,
+        {
+            entry: "./base/src/exports.ts",
+            output: {
+                filename: "taipy-gui-base.js",
+                path: taipyGuiBaseExportPath,
+                library: {
+                    name: taipyGuiBaseBundleName,
+                    type: "umd",
                 },
-            ],
-        },
-        resolve: {
-            extensions: [".tsx", ".ts", ".js", ".tsx"],
-        },
-        plugins: [
-            new CopyWebpackPlugin({
-                patterns: [
-                    { from: "./base/src/packaging", to: taipyGuiBaseExportPath },
+                publicPath: "",
+            },
+            module: {
+                rules: [
+                    {
+                        test: /\.tsx?$/,
+                        use: "ts-loader",
+                        exclude: /node_modules/,
+                    },
                 ],
-            }),
-        ],
-    }];
+            },
+            resolve: {
+                extensions: [".tsx", ".ts", ".js", ".tsx"],
+            },
+            plugins: [
+                new CopyWebpackPlugin({
+                    patterns: [
+                        { from: "./base/src/packaging", to: taipyGuiBaseExportPath },
+                    ],
+                }),
+            ],
+        }];
 };

BIN
readme_img/gui_creation.webp


BIN
readme_img/readme_app.gif


BIN
readme_img/readme_cloud_demo.gif


BIN
readme_img/readme_demo_studio.gif


BIN
readme_img/readme_exec_graph.png


BIN
readme_img/readme_logo.png


BIN
readme_img/scenario_and_data_mgt.gif


BIN
readme_img/taipy-github-optimized.png


BIN
readme_img/taipy_banner.png


BIN
readme_img/taipy_github_GUI_video.gif


BIN
readme_img/taipy_github_data_support.png


BIN
readme_img/taipy_github_scenario.png


BIN
readme_img/taipy_github_scenarios_video.gif


BIN
readme_img/tiny_demo_readme.gif


+ 9 - 20
taipy/common/pyproject.toml

@@ -1,18 +1,17 @@
 [build-system]
-requires = ["setuptools>=42", "wheel"]
+requires = ["setuptools>=76", "wheel"]
 build-backend = "setuptools.build_meta"
 
 [project]
 name = "taipy-common"
 description = "A Taipy package dedicated to provide common data structures, types, classes and functions."
 readme = "package_desc.md"
-requires-python = ">=3.9"
-license = {text = "Apache License 2.0"}
+requires-python = ">=3.9,<3.13"
+license = "Apache-2.0"
 authors = [{name = "Avaiga", email = "dev@taipy.io"}]
-keywords = ["taipy-common"]
+keywords = ["taipy", "taipy-common"]
 classifiers = [
     "Intended Audience :: Developers",
-    "License :: OSI Approved :: Apache Software License",
     "Natural Language :: English",
     "Programming Language :: Python :: 3",
     "Programming Language :: Python :: 3.9",
@@ -22,21 +21,11 @@ classifiers = [
 ]
 dynamic = ["version", "dependencies"]
 
-[project.optional-dependencies]
-testing = ["pytest>=3.9"]
-
 [tool.setuptools.packages]
-find = {include = [
-    "taipy",
-    "taipy.common",
-    "taipy.common.*",
-    "taipy.common.config",
-    "taipy.common.config.*",
-    "taipy.common.logger",
-    "taipy.common.logger.*",
-    "taipy.common._cli",
-    "taipy.common._cli.*"
-]}
+find = {include = ["taipy", "taipy.common", "taipy.common.*"]}
+
+[project.optional-dependencies]
+test = ["pytest>=6.0"]
 
 [project.urls]
-homepage = "https://github.com/avaiga/taipy"
+Homepage = "https://github.com/Avaiga/taipy"

+ 1 - 1
taipy/common/version.json

@@ -1 +1 @@
-{"major": 4, "minor": 1, "patch": 0, "ext": "dev2"}
+{"major": 4, "minor": 1, "patch": 0}

+ 1 - 1
taipy/core/_orchestrator/_dispatcher/_task_function_wrapper.py

@@ -54,7 +54,7 @@ class _TaskFunctionWrapper:
 
     def _read_inputs(self, inputs: List[DataNode]) -> List[Any]:
         data_manager = _DataManagerFactory._build_manager()
-        return [data_manager._get(dn.id).read_or_raise() for dn in inputs]
+        return [data_manager._read(data_manager._get(dn.id)) for dn in inputs]
 
     def _write_data(self, outputs: List[DataNode], results, job_id: JobId):
         data_manager = _DataManagerFactory._build_manager()

+ 42 - 2
taipy/core/data/_data_manager.py

@@ -10,10 +10,11 @@
 # specific language governing permissions and limitations under the License.
 
 import os
-from typing import Dict, Iterable, List, Optional, Set, Union
+from typing import Any, Dict, Iterable, List, Optional, Set, Union
 
 from taipy.common.config import Config
 from taipy.common.config._config import _Config
+from taipy.core.job.job_id import JobId
 
 from .._manager._manager import _Manager
 from .._repository._abstract_repository import _AbstractRepository
@@ -21,7 +22,7 @@ from .._version._version_mixin import _VersionMixin
 from ..common.scope import Scope
 from ..config.data_node_config import DataNodeConfig
 from ..cycle.cycle_id import CycleId
-from ..exceptions.exceptions import InvalidDataNodeType
+from ..exceptions.exceptions import InvalidDataNodeType, NoData
 from ..notification import Event, EventEntityType, EventOperation, Notifier, _make_event
 from ..reason import EntityDoesNotExist, NotGlobalScope, ReasonCollection, WrongConfigType
 from ..reason.reason import DataIsNotDuplicable
@@ -125,6 +126,45 @@ class _DataManager(_Manager[DataNode], _VersionMixin):
         filters = cls._build_filters_with_version(version_number)
         return cls._repository._load_all(filters)
 
+    @classmethod
+    def _read(cls, data_node: DataNode) -> Any:
+        """Read the data referenced by this data node.
+
+        Returns:
+            The data referenced by this data node.
+
+        Raises:
+            NoData^: If the data has not been written yet.
+        """
+        if not data_node.last_edit_date:
+            raise NoData(f"Data node {data_node.id} from config {data_node.config_id} has not been written yet.")
+
+        return data_node._read()
+
+    @classmethod
+    def _append(
+        cls, data_node: DataNode, data, editor_id: Optional[str] = None, comment: Optional[str] = None, **kwargs: Any
+    ):
+        data_node._append(data)
+        data_node.track_edit(editor_id=editor_id, comment=comment, **kwargs)
+        data_node.unlock_edit()
+        cls._update(data_node)
+
+    @classmethod
+    def _write(
+        cls,
+        data_node: DataNode,
+        data,
+        job_id: Optional[JobId] = None,
+        editor_id: Optional[str] = None,
+        comment: Optional[str] = None,
+        **kwargs: Any,
+    ):
+        data_node._write(data)
+        data_node.track_edit(job_id=job_id, editor_id=editor_id, comment=comment, **kwargs)
+        data_node.unlock_edit()
+        cls._update(data_node)
+
     @classmethod
     def _clean_generated_file(cls, data_node: DataNode) -> None:
         if not isinstance(data_node, _FileDataNodeMixin):

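Editor's note — to make the new manager-level helpers concrete, here is a minimal sketch of how `_DataManager._read` and `_DataManager._write` could be exercised. It mirrors the new tests further down in this diff and assumes a test-like Taipy Core environment; the `demo_data` config id is hypothetical, and these underscore-prefixed calls are private APIs that may change without notice.

    # Sketch only: mirrors tests/core/data/test_data_manager.py below; private APIs.
    from taipy.common.config import Config
    from taipy.core.data._data_manager import _DataManager
    from taipy.core.exceptions.exceptions import NoData

    cfg = Config.configure_data_node(id="demo_data", storage_type="pickle")
    dn = _DataManager._create(cfg, None, None)

    try:
        _DataManager._read(dn)            # raises NoData before the first write
    except NoData:
        pass

    _DataManager._write(dn, {"a": 1})     # writes, tracks the edit, unlocks, persists the node
    assert _DataManager._read(dn) == {"a": 1}
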
+ 12 - 14
taipy/core/data/data_node.py

@@ -404,9 +404,9 @@ class DataNode(_Entity, _Labeled):
         Raises:
             NoData^: If the data has not been written yet.
         """
-        if not self.last_edit_date:
-            raise NoData(f"Data node {self.id} from config {self.config_id} has not been written yet.")
-        return self._read()
+        from ._data_manager_factory import _DataManagerFactory
+
+        return _DataManagerFactory._build_manager()._read(self)
 
     def read(self) -> Any:
         """Read the data referenced by this data node.
@@ -414,8 +414,10 @@ class DataNode(_Entity, _Labeled):
         Returns:
             The data referenced by this data node. None if the data has not been written yet.
         """
+        from ._data_manager_factory import _DataManagerFactory
+
         try:
-            return self.read_or_raise()
+            return _DataManagerFactory._build_manager()._read(self)
         except NoData:
             self._logger.warning(
                 f"Data node {self.id} from config {self.config_id} is being read but has never been written."
@@ -432,8 +434,6 @@ class DataNode(_Entity, _Labeled):
             **kwargs (Any): Extra information to attach to the edit document
                 corresponding to this write.
         """
-        from ._data_manager_factory import _DataManagerFactory
-
         if (
             editor_id
             and self.edit_in_progress
@@ -441,10 +441,10 @@ class DataNode(_Entity, _Labeled):
             and (not self.editor_expiration_date or self.editor_expiration_date > datetime.now())
         ):
             raise DataNodeIsBeingEdited(self.id, self.editor_id)
-        self._append(data)
-        self.track_edit(editor_id=editor_id, comment=comment, **kwargs)
-        self.unlock_edit()
-        _DataManagerFactory._build_manager()._update(self)
+
+        from ._data_manager_factory import _DataManagerFactory
+
+        _DataManagerFactory._build_manager()._append(self, data, editor_id, comment, **kwargs)
 
     def write(
         self,
@@ -473,12 +473,10 @@ class DataNode(_Entity, _Labeled):
             and (not self.editor_expiration_date or self.editor_expiration_date > datetime.now())
         ):
             raise DataNodeIsBeingEdited(self.id, self.editor_id)
-        self._write(data)
-        self.track_edit(job_id=job_id, editor_id=editor_id, comment=comment, **kwargs)
-        self.unlock_edit()
+
         from ._data_manager_factory import _DataManagerFactory
 
-        _DataManagerFactory._build_manager()._update(self)
+        _DataManagerFactory._build_manager()._write(self, data, job_id, editor_id, comment, **kwargs)
 
     def track_edit(
         self,

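Editor's note — the public `DataNode` surface is unchanged by this refactoring; reads and writes are now simply delegated to the data manager. A short sketch of the behavior the updated tests rely on (the `notes` config id is hypothetical; creation goes through the private `_DataManager._create` as in the tests):

    from taipy.common.config import Config
    from taipy.core.data._data_manager import _DataManager
    from taipy.core.exceptions.exceptions import NoData

    cfg = Config.configure_data_node(id="notes", storage_type="pickle")
    dn = _DataManager._create(cfg, None, None)

    assert dn.read() is None              # logs a warning and returns None when nothing was written
    try:
        dn.read_or_raise()                # raises NoData instead of returning None
    except NoData:
        pass

    dn.write(["first entry"], comment="initial write")
    assert dn.read() == ["first entry"]
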
+ 11 - 11
taipy/core/pyproject.toml

@@ -1,18 +1,17 @@
 [build-system]
-requires = ["setuptools>=42", "wheel"]
+requires = ["setuptools>=76", "wheel"]
 build-backend = "setuptools.build_meta"
 
 [project]
 name = "taipy-core"
 description = "A Python library to build powerful and customized data-driven back-end applications."
 readme = "package_desc.md"
-requires-python = ">=3.9"
-license = {text = "Apache License 2.0"}
+requires-python = ">=3.9,<3.13"
+license = "Apache-2.0"
 authors = [{name = "Avaiga", email = "dev@taipy.io"}]
-keywords = ["taipy-core"]
+keywords = ["taipy", "taipy-core"]
 classifiers = [
     "Intended Audience :: Developers",
-    "License :: OSI Approved :: Apache Software License",
     "Natural Language :: English",
     "Programming Language :: Python :: 3",
     "Programming Language :: Python :: 3.9",
@@ -22,16 +21,17 @@ classifiers = [
 ]
 dynamic = ["version", "dependencies"]
 
+[tool.setuptools.packages]
+find = {include = ["taipy", "taipy.core", "taipy.core.*"]}
+
 [project.optional-dependencies]
+mongo = ["pymongo[srv]>=4.2.0,<5.0"]
 mssql = ["pyodbc>=4,<4.1"]
 mysql = ["pymysql>1,<1.1"]
-postgresql = ["psycopg2>2.9,<2.10"]
 parquet = ["fastparquet==2022.11.0", "pyarrow>=16.0.0,<19.0"]
+postgresql = ["psycopg2>2.9,<2.10"]
 s3 = ["boto3==1.29.1"]
-mongo = ["pymongo[srv]>=4.2.0,<5.0"]
-
-[tool.setuptools.packages]
-find = {include = ["taipy", "taipy.core", "taipy.core.*"]}
+test = ["pytest>=6.0"]
 
 [project.urls]
-homepage = "https://github.com/avaiga/taipy"
+Homepage = "https://github.com/Avaiga/taipy"

+ 1 - 1
taipy/core/version.json

@@ -1 +1 @@
-{"major": 4, "minor": 1, "patch": 0, "ext": "dev2"}
+{"major": 4, "minor": 1, "patch": 0}

+ 0 - 1
taipy/gui/_default_config.py

@@ -42,7 +42,6 @@ _default_stylekit: Stylekit = {
 # Default config loaded by app.py
 default_config: Config = {
     "allow_unsafe_werkzeug": False,
-    "app_id": False,
     "async_mode": "gevent",
     "change_delay": None,
     "chart_dark_template": None,

+ 1 - 0
taipy/gui/_renderers/factory.py

@@ -132,6 +132,7 @@ class _Factory:
                 ("width", PropertyType.string_or_number),
                 ("height", PropertyType.string_or_number),
                 ("layout", PropertyType.dynamic_dict),
+                ("animation_data", PropertyType.data),
                 ("plot_config", PropertyType.dict),
                 ("on_range_change", PropertyType.function),
                 ("active", PropertyType.dynamic_boolean, True),

+ 2 - 2
taipy/gui/_renderers/json.py

@@ -32,8 +32,8 @@ class JsonAdapter(ABC):
         _TaipyJsonAdapter().register(self)
 
     @abstractmethod
-    def parse(self, o) -> t.Union[t.Any, None]:
-        return None
+    def parse(self, o) -> t.Optional[t.Any]:
+        ...  # pragma: no cover
 
 
 class _DefaultJsonAdapter(JsonAdapter):

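Editor's note — with `parse` now declared as a genuinely abstract method, a custom adapter must implement it and return `None` for objects it does not handle, exactly as the new `test_custom_adapter` below demonstrates. A condensed sketch (the `Money` class is hypothetical):

    import typing as t

    from taipy.gui._renderers.json import JsonAdapter

    class Money:
        def __init__(self, amount: float, currency: str):
            self.amount = amount
            self.currency = currency

    class MoneyAdapter(JsonAdapter):
        def parse(self, o) -> t.Optional[t.Any]:
            # Serialize Money as a plain string; return None to fall through to other adapters.
            if isinstance(o, Money):
                return f"{o.amount:.2f} {o.currency}"
            return None

    MoneyAdapter().register()   # registered adapters are consulted by _TaipyJsonEncoder
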
+ 0 - 2
taipy/gui/config.py

@@ -29,7 +29,6 @@ from .utils import _is_in_notebook
 
 ConfigParameter = t.Literal[
     "allow_unsafe_werkzeug",
-    "app_id",
     "async_mode",
     "change_delay",
     "chart_dark_template",
@@ -104,7 +103,6 @@ Config = t.TypedDict(
     "Config",
     {
         "allow_unsafe_werkzeug": bool,
-        "app_id": t.Optional[bool],
         "async_mode": str,
         "change_delay": t.Optional[int],
         "chart_dark_template": t.Optional[t.Dict[str, t.Any]],

+ 1 - 1
taipy/gui/data/content_accessor.py

@@ -113,7 +113,7 @@ class _ContentAccessor:
             self.__content_paths[url_path] = dir_path
             file_url = f"{url_path}/{path.name}"
             self.__url_is_image[file_url] = image
-            return (urllib.parse.quote_plus(file_url, safe="/"),)
+            return (urllib.parse.quote(file_url, safe="/"),)
         elif _has_magic_module:
             try:
                 mime = magic.from_buffer(value, mime=True)

+ 10 - 11
taipy/gui/gui.py

@@ -724,10 +724,9 @@ class Gui:
                     payload.get("id", "") if isinstance(payload, dict) else str(payload)
                 )
                 client_id = res[0] if res[1] else None
-                if self._config.config.get("app_id", False):
-                    front_app_id = payload.get("app_id", None) if isinstance(payload, dict) else None
-                    if front_app_id is not None:
-                        self.__handle_ws_app_id({"name": message.get("name"), "payload": front_app_id})
+                front_gui_addr = payload.get("gui_addr", None) if isinstance(payload, dict) else None
+                if front_gui_addr is not None:
+                    self.__handle_ws_gui_addr({"name": message.get("name"), "payload": front_gui_addr})
             expected_client_id = client_id or message.get(Gui.__ARG_CLIENT_ID)
             self.__set_client_id_in_context(expected_client_id)
             get_request_meta().ws_client_id = expected_client_id
@@ -752,8 +751,8 @@ class Gui:
                         self.__handle_ws_get_module_context(payload)
                     elif msg_type == _WsType.GET_DATA_TREE.value:
                         self.__handle_ws_get_data_tree()
-                    elif msg_type == _WsType.APP_ID.value:
-                        self.__handle_ws_app_id(message)
+                    elif msg_type == _WsType.GUI_ADDR.value:
+                        self.__handle_ws_gui_addr(message)
                     elif msg_type == _WsType.GET_ROUTES.value:
                         self.__handle_ws_get_routes()
                     elif msg_type == _WsType.LOCAL_STORAGE.value:
@@ -1361,18 +1360,18 @@ class Gui:
             send_back_only=True,
         )
 
-    def __handle_ws_app_id(self, message: t.Any):
+    def __handle_ws_gui_addr(self, message: t.Any):
         if not isinstance(message, dict):
             return
         name = message.get("name", "")
         payload = message.get("payload", "")
-        app_id = id(self)
-        if payload == app_id:
+        gui_addr = id(self)
+        if payload == gui_addr:
             return
         self.__send_ws(
             {
-                "type": _WsType.APP_ID.value,
-                "payload": {"name": name, "id": app_id},
+                "type": _WsType.GUI_ADDR.value,
+                "payload": {"name": name, "id": gui_addr},
             },
             send_back_only=True,
         )

+ 1 - 1
taipy/gui/types.py

@@ -43,7 +43,7 @@ class _WsType(Enum):
     BLOCK = "BL"
     NAVIGATE = "NA"
     CLIENT_ID = "ID"
-    APP_ID = "AID"
+    GUI_ADDR = "GA"
     MULTIPLE_MESSAGE = "MS"
     DOWNLOAD_FILE = "DF"
     PARTIAL = "PR"

+ 4 - 3
taipy/gui/utils/chart_config_builder.py

@@ -192,6 +192,7 @@ def _build_chart_config(  # noqa: C901
         or ({"color": t[_Chart_iprops.color.value]} if t[_Chart_iprops.color.value] else None)
         for t in traces
     ]
+
     opt_cols: t.List[t.Set[str]] = [set()] * len(traces)
     for idx, m in enumerate(markers):
         if isinstance(m, (dict, _MapDict)):
@@ -227,9 +228,9 @@ def _build_chart_config(  # noqa: C901
         [
             c2
             for c2 in [
-                __get_col_from_indexed(c1, i)
-                for c1 in t.cast(dict, col_dicts[i] if i < len(col_dicts) else col_dicts[0]).keys()
-            ]
+            __get_col_from_indexed(c1, i)
+            for c1 in t.cast(dict, col_dicts[i] if i < len(col_dicts) else col_dicts[0]).keys()
+        ]
             if c2
         ]
         for i in range(len(traces))

+ 1 - 1
taipy/gui/version.json

@@ -1 +1 @@
-{"major": 4, "minor": 1, "patch": 0, "ext": "dev2"}
+{"major": 4, "minor": 1, "patch": 0}

+ 5 - 0
taipy/gui/viselements.json

@@ -508,6 +508,11 @@
                         "default_value": "\"scatter\"",
                         "doc": "Chart type.<br/>See the Plotly <a href=\"https://plotly.com/javascript/reference/\">chart type</a> documentation for more details."
                     },
+                    {
+                        "name": "animation_data",
+                        "type": "dynamic(Any)",
+                        "doc": "A dictionary holding updated values for the dataset defined in <i>data</i>.<br/>Each key in <i>animation_data</i> must exist in <i>data</i>. Changing this property triggers an animation of the chart, transitioning from the original values (<i>data</i>) to the updated ones (<i>animation_data</i>).<br/>This animation currently works only with <i>scatter</i> trace types; with other trace types, the animation may not behave as expected."
+                    },
                     {
                         "name": "mode",
                         "type": "indexed(str)",

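Editor's note — a hedged sketch of how the new property could be bound from Python; the variable names and the on_init-driven update are illustrative, and only scatter traces are expected to animate:

    from taipy.gui import Gui

    data = {"x": list(range(10)), "y": [v * v for v in range(10)]}
    animation_data = data

    # The chart control binds both datasets; property names follow the hunk above.
    page = "<|{data}|chart|type=scatter|x=x|y=y|animation_data={animation_data}|>"

    def on_init(state):
        # Assigning a dict whose keys all exist in `data` triggers the animated transition.
        state.animation_data = {"x": data["x"], "y": [2 * v for v in data["x"]]}

    Gui(page).run()
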
+ 9 - 7
taipy/rest/pyproject.toml

@@ -1,18 +1,17 @@
 [build-system]
-requires = ["setuptools>=42", "wheel"]
+requires = ["setuptools>=76", "wheel"]
 build-backend = "setuptools.build_meta"
 
 [project]
 name = "taipy-rest"
 description = "Library to expose taipy-core REST APIs."
 readme = "package_desc.md"
-requires-python = ">=3.9"
-license = {text = "Apache License 2.0"}
+requires-python = ">=3.9,<3.13"
+license = "Apache-2.0"
 authors = [{name = "Avaiga", email = "dev@taipy.io"}]
-keywords = ["taipy-rest"]
+keywords = ["taipy", "rest", "taipy-rest"]
 classifiers = [
     "Intended Audience :: Developers",
-    "License :: OSI Approved :: Apache Software License",
     "Natural Language :: English",
     "Programming Language :: Python :: 3",
     "Programming Language :: Python :: 3.9",
@@ -23,7 +22,10 @@ classifiers = [
 dynamic = ["version", "dependencies"]
 
 [tool.setuptools.packages]
-find = {include = ["taipy", "taipy.rest"]}
+find = {include = ["taipy", "taipy.rest", "taipy.rest.*"]}
+
+[project.optional-dependencies]
+test = ["pytest>=6.0"]
 
 [project.urls]
-homepage = "https://github.com/avaiga/taipy"
+Homepage = "https://github.com/Avaiga/taipy"

+ 1 - 1
taipy/rest/version.json

@@ -1 +1 @@
-{"major": 4, "minor": 1, "patch": 0, "ext": "dev2"}
+{"major": 4, "minor": 1, "patch": 0}

+ 6 - 7
taipy/templates/pyproject.toml

@@ -1,18 +1,17 @@
 [build-system]
-requires = ["setuptools>=42", "wheel"]
+requires = ["setuptools>=76", "wheel"]
 build-backend = "setuptools.build_meta"
 
 [project]
 name = "taipy-templates"
 description = "An open-source package holding Taipy application templates."
 readme = "package_desc.md"
-requires-python = ">=3.9"
-license = {text = "Apache License 2.0"}
+requires-python = ">=3.9,<3.13"
+license = "Apache-2.0"
 authors = [{name = "Avaiga", email = "dev@taipy.io"}]
-keywords = ["taipy-templates"]
+keywords = ["taipy", "taipy-templates"]
 classifiers = [
     "Intended Audience :: Developers",
-    "License :: OSI Approved :: Apache Software License",
     "Natural Language :: English",
     "Programming Language :: Python :: 3",
     "Programming Language :: Python :: 3.9",
@@ -23,7 +22,7 @@ classifiers = [
 dynamic = ["version", "dependencies"]
 
 [tool.setuptools.packages]
-find = {include = ["taipy"]}
+find = {include = ["taipy", "taipy.templates", "taipy.templates.*"]}
 
 [project.urls]
-homepage = "https://github.com/avaiga/taipy"
+Homepage = "https://github.com/Avaiga/taipy"

+ 1 - 1
taipy/templates/version.json

@@ -1 +1 @@
-{"major": 4, "minor": 1, "patch": 0, "ext": "dev2"}
+{"major": 4, "minor": 1, "patch": 0}

+ 1 - 1
taipy/version.json

@@ -1 +1 @@
-{"major": 4, "minor": 1, "patch": 0, "ext": "dev2"}
+{"major": 4, "minor": 1, "patch": 0}

+ 64 - 1
tests/core/data/test_data_manager.py

@@ -12,7 +12,9 @@
 import os
 import pathlib
 
+import pandas as pd
 import pytest
+from pandas.testing import assert_frame_equal
 
 from taipy import Scope
 from taipy.common.config import Config
@@ -23,7 +25,7 @@ from taipy.core.data.csv import CSVDataNode
 from taipy.core.data.data_node_id import DataNodeId
 from taipy.core.data.in_memory import InMemoryDataNode
 from taipy.core.data.pickle import PickleDataNode
-from taipy.core.exceptions.exceptions import InvalidDataNodeType, ModelNotFound
+from taipy.core.exceptions.exceptions import InvalidDataNodeType, ModelNotFound, NoData
 from taipy.core.reason import EntityDoesNotExist, NotGlobalScope, WrongConfigType
 from tests.core.utils.named_temporary_file import NamedTemporaryFile
 
@@ -495,6 +497,67 @@ class TestDataManager:
 
         dm._delete_all()
 
+    @pytest.mark.parametrize(
+        "storage_type,path",
+        [
+            ("pickle", "pickle_file_path"),
+            ("csv", "csv_file"),
+            ("excel", "excel_file"),
+            ("json", "json_file"),
+            ("parquet", "parquet_file_path"),
+        ],
+    )
+    def test_read(self, storage_type, path, request):
+        path = request.getfixturevalue(path)
+
+        non_exist_dn_config = Config.configure_data_node(id="d1", storage_type=storage_type, path="non_exist_path")
+        dn_config = Config.configure_data_node(id="d2", storage_type=storage_type, path=path)
+        dn_1 = _DataManager._create(non_exist_dn_config, None, None)
+        dn_2 = _DataManager._create(dn_config, None, None)
+
+        with pytest.raises(NoData):
+            _DataManager._read(dn_1)
+
+        assert dn_2._read() is not None
+
+    @pytest.mark.parametrize(
+        "storage_type,path",
+        [
+            ("pickle", "pickle_file_path"),
+            ("csv", "csv_file"),
+            ("parquet", "parquet_file_path"),
+        ],
+    )
+    def test_write(self, storage_type, path, request):
+        path = request.getfixturevalue(path)
+
+        dn_config = Config.configure_data_node(id="d2", storage_type=storage_type, path=path)
+        dn = _DataManager._create(dn_config, None, None)
+
+        new_data = pd.DataFrame([{"a": 11, "b": 12, "c": 13}, {"a": 14, "b": 15, "c": 16}])
+
+        _DataManager._write(dn, new_data)
+        assert_frame_equal(dn._read(), new_data)
+
+    @pytest.mark.parametrize(
+        "storage_type,path",
+        [
+            ("csv", "csv_file"),
+            ("parquet", "parquet_file_path"),
+        ],
+    )
+    def test_append(self, storage_type, path, request):
+        path = request.getfixturevalue(path)
+
+        dn_config = Config.configure_data_node(id="d2", storage_type=storage_type, path=path)
+        dn = _DataManager._create(dn_config, None, None)
+
+        old_data = _DataManager._read(dn)
+        new_data = pd.DataFrame([{"a": 11, "b": 12, "c": 13}, {"a": 14, "b": 15, "c": 16}])
+
+        _DataManager._append(dn, new_data)
+        assert_frame_equal(dn._read(), pd.concat([old_data, new_data], ignore_index=True))
+
     @pytest.mark.parametrize(
         "storage_type,path",
         [

+ 3 - 1
tests/core/data/test_data_node.py

@@ -150,8 +150,10 @@ class TestDataNode:
     def test_read_write(self):
         dn = FakeDataNode("foo_bar")
         _DataManagerFactory._build_manager()._repository._save(dn)
+        assert dn.read() is None
+        with pytest.raises(NoData):
+            _DataManagerFactory._build_manager()._read(dn)
         with pytest.raises(NoData):
-            assert dn.read() is None
             dn.read_or_raise()
         assert dn.write_has_been_called == 0
         assert dn.read_has_been_called == 0

+ 3 - 1
tests/core/data/test_in_memory_data_node.py

@@ -52,8 +52,10 @@ class TestInMemoryDataNodeEntity:
     def test_read_and_write(self):
         no_data_dn = InMemoryDataNode("foo", Scope.SCENARIO, DataNodeId("dn_id"))
         _DataManagerFactory._build_manager()._repository._save(no_data_dn)
+        assert no_data_dn.read() is None
+        with pytest.raises(NoData):
+            _DataManagerFactory._build_manager()._read(no_data_dn)
         with pytest.raises(NoData):
-            assert no_data_dn.read() is None
             no_data_dn.read_or_raise()
         in_mem_dn = InMemoryDataNode("foo", Scope.SCENARIO, properties={"default_data": "bar"})
         _DataManagerFactory._build_manager()._repository._save(in_mem_dn)

+ 3 - 1
tests/core/data/test_json_data_node.py

@@ -162,8 +162,10 @@ class TestJSONDataNode:
 
     def test_read_non_existing_json(self):
         not_existing_json = JSONDataNode("foo", Scope.SCENARIO, properties={"default_path": "WRONG.json"})
+        assert not_existing_json.read() is None
+        with pytest.raises(NoData):
+            _DataManagerFactory._build_manager()._read(not_existing_json)
         with pytest.raises(NoData):
-            assert not_existing_json.read() is None
             not_existing_json.read_or_raise()
 
     def test_read(self):

+ 3 - 1
tests/core/data/test_pickle_data_node.py

@@ -117,8 +117,10 @@ class TestPickleDataNodeEntity:
     def test_read_and_write(self):
         no_data_dn = PickleDataNode("foo", Scope.SCENARIO)
         _DataManagerFactory._build_manager()._repository._save(no_data_dn)
+        assert no_data_dn.read() is None
+        with pytest.raises(NoData):
+            _DataManagerFactory._build_manager()._read(no_data_dn)
         with pytest.raises(NoData):
-            assert no_data_dn.read() is None
             no_data_dn.read_or_raise()
         pickle_str = PickleDataNode("foo", Scope.SCENARIO, properties={"default_data": "bar"})
         _DataManagerFactory._build_manager()._repository._save(pickle_str)

+ 6 - 3
tests/core/data/test_read_csv_data_node.py

@@ -18,6 +18,7 @@ import pandas as pd
 import pytest
 
 from taipy import Scope
+from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data.csv import CSVDataNode
 from taipy.core.exceptions.exceptions import NoData
 
@@ -33,8 +34,10 @@ class MyCustomObject:
 
 def test_raise_no_data_with_header():
     not_existing_csv = CSVDataNode("foo", Scope.SCENARIO, properties={"path": "WRONG.csv", "has_header": True})
+    assert not_existing_csv.read() is None
+    with pytest.raises(NoData):
+        _DataManagerFactory._build_manager()._read(not_existing_csv)
     with pytest.raises(NoData):
-        assert not_existing_csv.read() is None
         not_existing_csv.read_or_raise()
 
 
@@ -75,9 +78,9 @@ def test_read_with_header_custom_exposed_type():
 
 def test_raise_no_data_without_header():
     not_existing_csv = CSVDataNode("foo", Scope.SCENARIO, properties={"path": "WRONG.csv", "has_header": False})
+    assert not_existing_csv.read() is None
     with pytest.raises(NoData):
-        assert not_existing_csv.read() is None
-        not_existing_csv.read_or_raise()
+        _DataManagerFactory._build_manager()._read(not_existing_csv)
 
 
 def test_read_without_header_pandas():

+ 12 - 11
tests/core/data/test_read_excel_data_node.py

@@ -18,6 +18,7 @@ import pandas as pd
 import pytest
 
 from taipy import Scope
+from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data.excel import ExcelDataNode
 from taipy.core.exceptions.exceptions import (
     ExposedTypeLengthMismatch,
@@ -62,9 +63,11 @@ custom_pandas_numpy_exposed_type_dict = {"Sheet1": "pandas", "Sheet2": "numpy"}
 
 
 def test_raise_no_data_with_header():
+    not_existing_excel = ExcelDataNode("foo", Scope.SCENARIO, properties={"path": "WRONG.xlsx"})
+    assert not_existing_excel.read() is None
+    with pytest.raises(NoData):
+        _DataManagerFactory._build_manager()._read(not_existing_excel)
     with pytest.raises(NoData):
-        not_existing_excel = ExcelDataNode("foo", Scope.SCENARIO, properties={"path": "WRONG.xlsx"})
-        assert not_existing_excel.read() is None
         not_existing_excel.read_or_raise()
 
 
@@ -79,12 +82,10 @@ def test_read_empty_excel_with_header():
 
 
 def test_raise_no_data_without_header():
+    not_existing_excel = ExcelDataNode("foo", Scope.SCENARIO, properties={"path": "WRONG.xlsx", "has_header": False})
+    assert not_existing_excel.read() is None
     with pytest.raises(NoData):
-        not_existing_excel = ExcelDataNode(
-            "foo", Scope.SCENARIO, properties={"path": "WRONG.xlsx", "has_header": False}
-        )
-        assert not_existing_excel.read() is None
-        not_existing_excel.read_or_raise()
+        _DataManagerFactory._build_manager()._read(not_existing_excel)
 
 
 def test_read_empty_excel_without_header():
@@ -103,9 +104,9 @@ def test_read_multi_sheet_with_header_no_data():
         Scope.SCENARIO,
         properties={"path": "WRONG.xlsx", "sheet_name": ["sheet_name_1", "sheet_name_2"]},
     )
+    assert not_existing_excel.read() is None
     with pytest.raises(NoData):
-        assert not_existing_excel.read() is None
-        not_existing_excel.read_or_raise()
+        _DataManagerFactory._build_manager()._read(not_existing_excel)
 
 
 def test_read_multi_sheet_without_header_no_data():
@@ -114,9 +115,9 @@ def test_read_multi_sheet_without_header_no_data():
         Scope.SCENARIO,
         properties={"path": "WRONG.xlsx", "has_header": False, "sheet_name": ["sheet_name_1", "sheet_name_2"]},
     )
+    assert not_existing_excel.read() is None
     with pytest.raises(NoData):
-        assert not_existing_excel.read() is None
-        not_existing_excel.read_or_raise()
+        _DataManagerFactory._build_manager()._read(not_existing_excel)
 
 
 ########################## SINGLE SHEET ##########################

+ 3 - 1
tests/core/data/test_read_parquet_data_node.py

@@ -68,8 +68,10 @@ class TestReadParquetDataNode:
         not_existing_parquet = ParquetDataNode(
             "foo", Scope.SCENARIO, properties={"path": "nonexistent.parquet", "engine": engine}
         )
+        assert not_existing_parquet.read() is None
+        with pytest.raises(NoData):
+            _DataManagerFactory._build_manager()._read(not_existing_parquet)
         with pytest.raises(NoData):
-            assert not_existing_parquet.read() is None
             not_existing_parquet.read_or_raise()
 
     @pytest.mark.parametrize("engine", __engine)

+ 20 - 0
tests/gui/control/test_file_download.py

@@ -50,6 +50,26 @@ def test_file_download_path_md(gui: Gui, test_client, helpers):
     helpers.test_control_md(gui, md_string, expected_list)
 
 
+def test_file_download_with_spaces_path_md(gui: Gui, test_client, helpers):
+    resources_dir = pathlib.Path(__file__).parent.parent / "resources"
+    test_file_path = resources_dir / "test file with spaces.txt"
+
+    try:
+        with open(test_file_path, "w") as f:
+            f.write("Test content")
+
+        gui._bind_var_val("content", str(test_file_path.resolve()))
+        md_string = "<|{content}|file_download|>"
+        expected_list = [
+            "<FileDownload",
+            'defaultContent="/taipy-content/taipyStatic0/test%20file%20with%20spaces.txt"',
+        ]
+        helpers.test_control_md(gui, md_string, expected_list)
+    finally:
+        if test_file_path.exists():
+            test_file_path.unlink()
+
+
 def test_file_download_any_file_md(gui: Gui, test_client, helpers):
     with open(os.path.abspath(__file__), "rb") as content:
         gui._bind_var_val("content", content.read())

+ 90 - 0
tests/gui/gui_specific/test_json_adapter.py

@@ -0,0 +1,90 @@
+# Copyright 2021-2025 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import json
+import typing as t
+import warnings
+from datetime import timedelta
+from pathlib import Path
+
+import numpy
+
+from taipy.gui import Icon
+from taipy.gui._renderers.json import JsonAdapter, _TaipyJsonEncoder
+from taipy.gui.utils import _DoNotUpdate, _TaipyNumber
+
+
+def test_default_adapter():
+    var = 123
+    json_string = json.dumps(var, cls=_TaipyJsonEncoder)
+    assert json_string == json.dumps(var)
+
+    var = Icon("image.png", "Text")
+    json_string = json.dumps(var, cls=_TaipyJsonEncoder)
+    assert json_string == json.dumps({"path": "image.png", "text": "Text"})
+
+    var = _TaipyNumber(123, "number")
+    json_string = json.dumps(var, cls=_TaipyJsonEncoder)
+    assert json_string == "123.0"
+
+    var = Path("a", "b")
+    json_string = json.dumps(var, cls=_TaipyJsonEncoder)
+    json_string = json_string.replace("\\\\", "/")
+    assert json_string == '"a/b"'
+
+    var = timedelta(1.5)
+    json_string = json.dumps(var, cls=_TaipyJsonEncoder)
+    assert json_string == '"1 day, 12:00:00"'
+
+    var = numpy.int32(123)
+    json_string = json.dumps(var, cls=_TaipyJsonEncoder)
+    assert json_string == "123"
+
+    var = _DoNotUpdate()
+    json_string = json.dumps(var, cls=_TaipyJsonEncoder)
+    assert json_string == "null"
+
+
+def test_adapter_unknown(helpers):
+    class TestClass:
+        def __init__(self, value: str):
+            self._value = value
+
+    var = TestClass("test")
+    with warnings.catch_warnings(record=True) as records:
+        json_string = json.dumps(var, cls=_TaipyJsonEncoder)
+        assert json_string == "null"
+        warns = helpers.get_taipy_warnings(records)
+        assert len(warns) == 1
+        assert "TestClass is not JSON serializable" in str(warns[0].message)
+
+
+def test_custom_adapter():
+    class TestClass:
+        def __init__(self, value: str):
+            self._value = TestClass.change_string(value)
+
+        @staticmethod
+        def change_string(s: str) -> str:
+            return s[::-1]
+
+    class TestAdapter(JsonAdapter):
+        def parse(self, o) -> t.Optional[t.Any]:
+            if isinstance(o, TestClass):
+                return o._value
+            return None
+
+    TestAdapter().register()
+
+    s = "abc"
+    var = TestClass(s)
+    json_string = json.dumps(var, cls=_TaipyJsonEncoder)
+    assert json_string == json.dumps(TestClass.change_string(s))

+ 0 - 16
tools/release/extract_from_setup.py → tests/tools/release/__init__.py

@@ -8,19 +8,3 @@
 # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
-
-import sys
-
-
-def extract_gui_version(base_path: str) -> None:
-    with open("setup.py") as f:
-        for line in f:
-            if "taipy-gui" in line:
-                start = line.find("taipy-gui")
-                end = line.rstrip().find('",')
-                print(f"VERSION={line[start:end]}")  # noqa: T201
-                break
-
-
-if __name__ == "__main__":
-    extract_gui_version(sys.argv[1])

+ 156 - 0
tests/tools/release/test_version.py

@@ -0,0 +1,156 @@
+# Copyright 2021-2025 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+import pytest
+
+from tools.release.common import Version
+
+
+def test_from_string():
+    with pytest.raises(ValueError):
+        Version.from_string("invalid")
+    with pytest.raises(ValueError):
+        Version.from_string("1")
+    with pytest.raises(ValueError):
+        Version.from_string("1.x.2")
+
+    version = Version.from_string("1.2")
+    assert version.major == 1
+    assert version.minor == 2
+    assert version.patch == 0
+    assert version.ext is None
+
+    version = Version.from_string("1.2.3")
+    assert version.major == 1
+    assert version.minor == 2
+    assert version.patch == 3
+    assert version.ext is None
+
+    version = Version.from_string("1.2.3.some_ext")
+    assert version.major == 1
+    assert version.minor == 2
+    assert version.patch == 3
+    assert version.ext == "some_ext"
+
+    version = Version.from_string("1.2.3.some_ext.more_ext")
+    assert version.major == 1
+    assert version.minor == 2
+    assert version.patch == 3
+    assert version.ext == "some_ext.more_ext"
+
+
+def test_extension():
+    version = Version.from_string("1.2.3")
+    extension = version.split_ext()
+    assert extension == ("", -1)
+
+    version = Version.from_string("1.2.3.some_ext")
+    extension = version.split_ext()
+    assert extension == ("some_ext", -1)
+
+    version = Version.from_string("1.2.3.some_ext123")
+    extension = version.split_ext()
+    assert extension == ("some_ext", 123)
+
+
+def test_to_string():
+    version = Version(major=1, minor=2)
+    assert str(version) == "1.2.0"
+
+    version = Version(major=1, minor=2, patch=3)
+    assert str(version) == "1.2.3"
+
+    version = Version(major=1, minor=2, patch=3, ext="some_ext")
+    assert str(version) == "1.2.3.some_ext"
+
+
+def test_to_dict():
+    version = Version(major=1, minor=2, patch=3)
+    assert version.to_dict() == {"major": 1, "minor": 2, "patch": 3}
+
+    version = Version(major=1, minor=2, patch=3, ext="some_ext")
+    assert version.to_dict() == {"major": 1, "minor": 2, "patch": 3, "ext": "some_ext"}
+
+
+def test_compatibility():
+    # Different major version number
+    v1 = Version(major=1, minor=2, patch=3)
+    v2 = Version(major=2, minor=2, patch=3)
+    assert not v1.is_compatible(v2), "Major versions differ"
+
+    # Different minor version number
+    v1 = Version(major=1, minor=2, patch=3)
+    v2 = Version(major=1, minor=3, patch=3)
+    assert not v1.is_compatible(v2), "Minor versions differ"
+
+    # All the same
+    v1 = Version(major=1, minor=2, patch=3)
+    v2 = Version(major=1, minor=2, patch=3)
+    assert v1.is_compatible(v2), "Identical versions"
+
+    # Greater patch number
+    v1 = Version(major=1, minor=2, patch=4)
+    v2 = Version(major=1, minor=2, patch=3)
+    assert v1.is_compatible(v2), "Patch number is greater"
+
+    # Smaller patch number
+    v1 = Version(major=1, minor=2, patch=3)
+    v2 = Version(major=1, minor=2, patch=4)
+    assert v1.is_compatible(v2), "Patch number is smaller"
+
+    # Same patch number, extension
+    v1 = Version(major=1, minor=2, patch=3, ext="ext")
+    v2 = Version(major=1, minor=2, patch=3)
+    assert v1.is_compatible(v2), "Same version, with extension"
+
+    # Same patch number, no extension
+    v1 = Version(major=1, minor=2, patch=3)
+    v2 = Version(major=1, minor=2, patch=3, ext="ext")
+    assert not v1.is_compatible(v2), "Same version, no extension is expected"
+
+    # Same patch number, different extension
+    v1 = Version(major=1, minor=2, patch=3, ext="some_ext")
+    v2 = Version(major=1, minor=2, patch=3, ext="another_ext")
+    assert not v1.is_compatible(v2), "Same version, different extensions"
+
+
+def test_order():
+    v1 = Version(major=1, minor=0)
+    v2 = Version(major=2, minor=0)
+    assert v1 < v2, "Version 1.0 is older than 2.0"
+    assert v2 > v1, "Version 2.0 is newer than 1.0"
+
+    v1 = Version(major=1, minor=0)
+    v2 = Version(major=1, minor=1)
+    assert v1 < v2, "Version 1.0 is older than 1.1"
+    assert v2 > v1, "Version 1.1 is newer than 1.0"
+
+    v1 = Version(major=1, minor=2)
+    v2 = Version(major=2, minor=1)
+    assert v1 < v2, "Version 1.2 is older than 2.1"
+    assert v2 > v1, "Version 2.1 is newer than 1.2"
+
+    v1 = Version(major=1, minor=0)
+    v2 = Version(major=1, minor=0, patch=1)
+    assert v1 < v2, "Version 1.0.0 is older than 1.0.1"
+    assert v2 > v1, "Version 1.0.1 is newer than 1.0.0"
+
+    v1 = Version(major=1, minor=2, patch=3)
+    v2 = Version(major=1, minor=2, patch=3, ext="dev0")
+    assert v1 > v2, "Version 1.2.3 is newer than 1.2.3.dev0"
+    assert v2 < v1, "Version 1.2.3.dev0 is older than 1.2.3"
+
+    v1 = Version(major=1, minor=2, patch=3, ext="dev0")
+    v2 = Version(major=1, minor=2, patch=3, ext="dev1")
+    assert v1 < v2, "Version 1.2.3.dev0 is older than 1.2.3.dev1"
+    assert v2 > v1, "Version 1.2.3.dev1 is newer than 1.2.3.dev0"
+
+    versions = [Version(1, 0), Version(2, 1), Version(3, 4), Version(2, 0)]
+    assert max(versions) == Version(3, 4), "Cannot find max in Version list"
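
A small sketch of how this ordering is used in practice, assuming the Version class is importable from common (the version numbers below are made up):

from common import Version

candidates = [
    Version.from_string("4.0.3"),
    Version.from_string("4.1.0.dev2"),
    Version.from_string("4.1.0"),
]
# Final releases sort after their own pre-releases, so max() picks 4.1.0.
assert max(candidates) == Version(4, 1, 0)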

+ 1 - 0
tools/packages/pipfiles/Pipfile3.10.max

@@ -4,6 +4,7 @@ verify_ssl = true
 name = "pypi"
 
 [dev-packages]
+fastparquet = "*"
 freezegun = "*"
 ipython = "*"
 ipykernel = "*"

+ 1 - 0
tools/packages/pipfiles/Pipfile3.11.max

@@ -4,6 +4,7 @@ verify_ssl = true
 name = "pypi"
 
 [dev-packages]
+fastparquet = "*"
 freezegun = "*"
 ipython = "*"
 ipykernel = "*"

+ 1 - 0
tools/packages/pipfiles/Pipfile3.12.max

@@ -4,6 +4,7 @@ verify_ssl = true
 name = "pypi"
 
 [dev-packages]
+fastparquet = "*"
 freezegun = "*"
 ipython = "*"
 ipykernel = "*"

+ 1 - 0
tools/packages/pipfiles/Pipfile3.9.max

@@ -4,6 +4,7 @@ verify_ssl = true
 name = "pypi"
 
 [dev-packages]
+fastparquet = "*"
 freezegun = "*"
 ipython = "*"
 ipykernel = "*"

+ 3 - 2
tools/packages/taipy-common/MANIFEST.in

@@ -1,6 +1,7 @@
+# Package taipy-common
 include taipy/common/*.json
 include taipy/common/config/*.pyi
 include taipy/common/config/*.json
-include *.json
-include taipy/common/setup.requirements.txt
+
+include setup.requirements.txt
 include package_desc.md

+ 4 - 30
tools/packages/taipy-common/setup.py

@@ -14,43 +14,17 @@
 import json
 from pathlib import Path
 
-from setuptools import find_packages, setup
+from setuptools import setup
 
 root_folder = Path(__file__).parent
 
-package_desc = Path(root_folder / "package_desc.md").read_text("UTF-8")
-
-version_path = "taipy/common/version.json"
-
-setup_requirements = Path("taipy/common/setup.requirements.txt")
-
-with open(version_path) as version_file:
+with open(root_folder / "taipy" / "common" / "version.json") as version_file:
     version = json.load(version_file)
-    version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}'
+    version_string = f'{version.get("major")}.{version.get("minor")}.{version.get("patch")}'
     if vext := version.get("ext"):
         version_string = f"{version_string}.{vext}"
 
-requirements = [r for r in (setup_requirements).read_text("UTF-8").splitlines() if r]
-
-test_requirements = ["pytest>=3.8"]
-
 setup(
     version=version_string,
-    install_requires=requirements,
-    packages=find_packages(
-        where=root_folder, include=[
-            "taipy",
-            "taipy.common",
-            "taipy.common.*",
-            "taipy.common.config",
-            "taipy.common.config.*",
-            "taipy.common.logger",
-            "taipy.common.logger.*",
-            "taipy.common._cli",
-            "taipy.common._cli.*"
-        ]
-    ),
-    include_package_data=True,
-    data_files=[('version', [version_path])],
-    tests_require=test_requirements,
+    install_requires=[r for r in (root_folder / "setup.requirements.txt").read_text("UTF-8").splitlines() if r]
 )
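
For reference, a hedged illustration of the version string these setup scripts assemble from version.json; the values below are hypothetical:

import json

# Hypothetical version.json content
version = json.loads('{"major": 4, "minor": 1, "patch": 0, "ext": "dev3"}')
version_string = f'{version.get("major")}.{version.get("minor")}.{version.get("patch")}'
if vext := version.get("ext"):
    version_string = f"{version_string}.{vext}"
print(version_string)  # 4.1.0.dev3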

+ 3 - 2
tools/packages/taipy-core/MANIFEST.in

@@ -1,5 +1,6 @@
+# Package taipy-core
 include taipy/core/*.json
 include taipy/core/config/*.json
-include *.json
-include taipy/core/setup.requirements.txt
+
+include setup.requirements.txt
 include package_desc.md

+ 4 - 26
tools/packages/taipy-core/setup.py

@@ -14,39 +14,17 @@
 import json
 from pathlib import Path
 
-from setuptools import find_packages, setup
+from setuptools import setup
 
 root_folder = Path(__file__).parent
 
-package_desc = Path(root_folder / "package_desc.md").read_text("UTF-8")
-
-version_path = "taipy/core/version.json"
-
-setup_requirements = Path("taipy/core/setup.requirements.txt")
-
-with open(version_path) as version_file:
+with open(root_folder / "taipy" / "core" / "version.json") as version_file:
     version = json.load(version_file)
-    version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}'
+    version_string = f'{version.get("major")}.{version.get("minor")}.{version.get("patch")}'
     if vext := version.get("ext"):
         version_string = f"{version_string}.{vext}"
 
-requirements = [r for r in (setup_requirements).read_text("UTF-8").splitlines() if r]
-
-test_requirements = ["pytest>=3.8"]
-
-extras_require = {
-    "fastparquet": ["fastparquet==2022.11.0"],
-    "mssql": ["pyodbc>=4,<4.1"],
-    "mysql": ["pymysql>1,<1.1"],
-    "postgresql": ["psycopg2>2.9,<2.10"],
-}
-
 setup(
     version=version_string,
-    install_requires=requirements,
-    packages=find_packages(where=root_folder, include=["taipy", "taipy.core", "taipy.core.*"]),
-    include_package_data=True,
-    data_files=[('version', [version_path])],
-    tests_require=test_requirements,
-    extras_require=extras_require,
+    install_requires=[r for r in (root_folder / "setup.requirements.txt").read_text("UTF-8").splitlines() if r]
 )

+ 3 - 2
tools/packages/taipy-gui/MANIFEST.in

@@ -1,7 +1,8 @@
+# Package taipy-gui
 recursive-include taipy/gui/webapp *
 include taipy/gui/version.json
 include taipy/gui/viselements.json
 include taipy/gui/*.pyi
-include *.json
-include taipy/gui/setup.requirements.txt
+
+include setup.requirements.txt
 include package_desc.md

+ 4 - 54
tools/packages/taipy-gui/setup.py

@@ -12,69 +12,19 @@
 """The setup script for taipy-gui package"""
 
 import json
-import os
-import platform
 from pathlib import Path
-import subprocess
 
-from setuptools import find_packages, setup
-from setuptools.command.build_py import build_py
+from setuptools import setup
 
 root_folder = Path(__file__).parent
 
-package_desc = Path(root_folder / "package_desc.md").read_text("UTF-8")
-
-version_path = "taipy/gui/version.json"
-
-setup_requirements = Path("taipy/gui/setup.requirements.txt")
-
-with open(version_path) as version_file:
+with open(root_folder / "taipy" / "gui" / "version.json") as version_file:
     version = json.load(version_file)
-    version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}'
+    version_string = f'{version.get("major")}.{version.get("minor")}.{version.get("patch")}'
     if vext := version.get("ext"):
         version_string = f"{version_string}.{vext}"
 
-requirements = [r for r in (setup_requirements).read_text("UTF-8").splitlines() if r]
-
-test_requirements = ["pytest>=3.8"]
-
-extras_require = {
-    "ngrok": ["pyngrok>=5.1,<6.0"],
-    "image": [
-        "python-magic>=0.4.24,<0.5;platform_system!='Windows'",
-        "python-magic-bin>=0.4.14,<0.5;platform_system=='Windows'",
-    ],
-    "arrow": ["pyarrow>=16.0.0,<19.0"],
-}
-
-
-class NPMInstall(build_py):
-    def run(self):
-        with_shell = platform.system() == "Windows"
-        print(f"Building taipy-gui frontend bundle in {root_folder}.")
-        already_exists = (root_folder / "taipy" / "gui" / "webapp" / "index.html").exists()
-        if already_exists:
-            print(f'Found taipy-gui frontend bundle in {root_folder  / "taipy" / "gui" / "webapp"}.')
-        else:
-            subprocess.run(
-                ["npm", "ci"], cwd=root_folder / "frontend" / "taipy-gui" / "dom", check=True, shell=with_shell
-            )
-            subprocess.run(
-                ["npm", "ci"], cwd=root_folder / "frontend" / "taipy-gui", check=True, shell=with_shell,
-            )
-            subprocess.run(
-                ["npm", "run", "build"], cwd=root_folder / "frontend" / "taipy-gui", check=True, shell=with_shell
-            )
-        build_py.run(self)
-
-
 setup(
     version=version_string,
-    install_requires=requirements,
-    packages=find_packages(where=root_folder, include=["taipy", "taipy.gui", "taipy.gui.*"]),
-    include_package_data=True,
-    data_files=[("version", [version_path])],
-    tests_require=test_requirements,
-    extras_require=extras_require,
-    cmdclass={"build_py": NPMInstall},
+    install_requires=[r for r in (root_folder / "setup.requirements.txt").read_text("UTF-8").splitlines() if r]
 )

+ 3 - 2
tools/packages/taipy-rest/MANIFEST.in

@@ -1,4 +1,5 @@
+# Package taipy-rest
 include taipy/rest/*.json
-include *.json
-include taipy/rest/setup.requirements.txt
+
+include setup.requirements.txt
 include package_desc.md

+ 4 - 15
tools/packages/taipy-rest/setup.py

@@ -14,28 +14,17 @@
 import json
 from pathlib import Path
 
-from setuptools import find_packages, setup
+from setuptools import setup
 
 root_folder = Path(__file__).parent
 
-package_desc = Path(root_folder / "package_desc.md").read_text("UTF-8")
-
-version_path = "taipy/rest/version.json"
-
-setup_requirements = Path("taipy/rest/setup.requirements.txt")
-
-with open(version_path) as version_file:
+with open(root_folder / "taipy" / "rest" / "version.json") as version_file:
     version = json.load(version_file)
-    version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}'
+    version_string = f'{version.get("major")}.{version.get("minor")}.{version.get("patch")}'
     if vext := version.get("ext"):
         version_string = f"{version_string}.{vext}"
 
-requirements = [r for r in (setup_requirements).read_text("UTF-8").splitlines() if r]
-
 setup(
     version=version_string,
-    packages=find_packages(where=root_folder, include=["taipy", "taipy.rest", "taipy.rest.*"]),
-    include_package_data=True,
-    data_files=[('version', [version_path])],
-    install_requires=requirements,
+    install_requires=[r for r in (root_folder / "setup.requirements.txt").read_text("UTF-8").splitlines() if r]
 )

+ 3 - 2
tools/packages/taipy-templates/MANIFEST.in

@@ -1,4 +1,5 @@
+# Package taipy-templates
 recursive-include taipy/templates *
-include *.json
-include taipy/templates/setup.requirements.txt
+
+include setup.requirements.txt
 include package_desc.md

+ 6 - 18
tools/packages/taipy-templates/setup.py

@@ -9,35 +9,23 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
-"""The setup script."""
+"""The setup script for taipy-templates package"""
 
 import json
 from pathlib import Path
 
-from setuptools import find_packages, setup
+from setuptools import setup
 
 root_folder = Path(__file__).parent
 
-package_desc = Path(root_folder / "package_desc.md").read_text("UTF-8")
-
-version_path = "taipy/templates/version.json"
-
-setup_requirements = Path("taipy/templates/setup.requirements.txt")
-
-with open(version_path) as version_file:
+with open(root_folder / "taipy" / "templates" / "version.json") as version_file:
     version = json.load(version_file)
-    version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}'
+    version_string = f'{version.get("major")}.{version.get("minor")}.{version.get("patch")}'
     if vext := version.get("ext"):
         version_string = f"{version_string}.{vext}"
 
-requirements = [r for r in (setup_requirements).read_text("UTF-8").splitlines() if r]
-
-test_requirements = ["pytest>=3.8"]
-
 setup(
-    packages=find_packages(where=root_folder, include=["taipy"]),
-    include_package_data=True,
-    data_files=[('version', [version_path])],
-    test_suite="tests",
     version=version_string,
+    install_requires=[r for r in (root_folder / "setup.requirements.txt").read_text("UTF-8").splitlines() if r],
+    test_suite="tests",
 )

+ 0 - 22
tools/packages/taipy/MANIFEST.in

@@ -1,29 +1,7 @@
-recursive-include tools *
-
 # Package taipy
 include taipy/*.json
 include taipy/gui_core/*.json
 include taipy/gui_core/lib/*.js
 
-# Package taipy-config
-include taipy/config/*.pyi
-include taipy/config/*.json
-
-# Package taipy-core
-include taipy/core/*.json
-include taipy/core/config/*.json
-
-# Package taipy-gui
-recursive-include taipy/gui/webapp *
-include taipy/gui/version.json
-include taipy/gui/viselements.json
-include taipy/gui/*.pyi
-
-# Package taipy-rest
-include taipy/rest/*.json
-
-# Package taipy-templates
-recursive-include taipy/templates *
-
 include setup.requirements.txt
 include package_desc.md

+ 3 - 36
tools/packages/taipy/setup.py

@@ -12,52 +12,19 @@
 """The setup script for taipy package"""
 
 import json
-import platform
 from pathlib import Path
-import subprocess
 
-from setuptools import find_packages, setup
-from setuptools.command.build_py import build_py
+from setuptools import setup
 
 root_folder = Path(__file__).parent
 
-package_desc = (root_folder / "package_desc.md").read_text("UTF-8")
-
 with open(root_folder / "taipy" / "version.json") as version_file:
     version = json.load(version_file)
-    version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}'
+    version_string = f'{version.get("major")}.{version.get("minor")}.{version.get("patch")}'
     if vext := version.get("ext"):
         version_string = f"{version_string}.{vext}"
 
-requirements = [r for r in (root_folder / "setup.requirements.txt").read_text("UTF-8").splitlines() if r]
-
-test_requirements = ["pytest>=3.8"]
-
-
-class NPMInstall(build_py):
-    def run(self):
-        subprocess.run(
-            ["python", "bundle_build.py"],
-            cwd=root_folder / "tools" / "frontend",
-            check=True,
-            shell=platform.system() == "Windows",
-        )
-        build_py.run(self)
-
-
 setup(
     version=version_string,
-    install_requires=requirements,
-    packages=find_packages(include=["taipy", "taipy.*"]),
-    extras_require={
-        "ngrok": ["pyngrok>=5.1,<6.0"],
-        "image": [
-            "python-magic>=0.4.24,<0.5;platform_system!='Windows'",
-            "python-magic-bin>=0.4.14,<0.5;platform_system=='Windows'",
-        ],
-        "rdp": ["rdp>=0.8"],
-        "arrow": ["pyarrow>=16.0.0,<19.0"],
-        "mssql": ["pyodbc>=4"],
-    },
-    cmdclass={"build_py": NPMInstall},
+    install_requires=[r for r in (root_folder / "setup.requirements.txt").read_text("UTF-8").splitlines() if r]
 )

+ 148 - 12
tools/release/build_package_structure.py

@@ -8,24 +8,160 @@
 # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
-
+# --------------------------------------------------------------------------------------------------
+# Builds the structure to hold the package files.
+#
+# Invoked by the workflow files build-and-release-single-package.yml and build-and-release.yml.
+# Working directory must be '[checkout-root]'.
+# --------------------------------------------------------------------------------------------------
+import argparse
+import json
 import os
+import re
 import shutil
-import sys
 from pathlib import Path
 
-__SKIP = ["LICENSE", "MANIFEST.in", "taipy", "setup.py", "tools", "pyproject.toml"]
+from common import Package, Version
 
+# Base build directory name
+DEST_ROOT = "build_"
 
-if __name__ == "__main__":
-    _package = sys.argv[1]
-    _package_path = f"taipy/{_package}"
+# Files to be copied from taipy/<package> to build directory
+BUILD_CP_FILES = ["README.md", "setup.py"]
+
+# Files to be moved from taipy/<package> to build directory
+BUILD_MV_FILES = ["LICENSE", "package_desc.md", "pyproject.toml"]
+
+# Items to skip while copying directory structure
+SKIP_ITEMS = {
+    "taipy": [
+        "build_taipy",
+        "doc",
+        "frontend",
+        "tests",
+        "tools",
+        ".git",
+        ".github",
+        ".pytest_cache",
+        "node_modules",
+    ],
+    "taipy-gui": [
+        "node_modules",
+    ],
+}
+
+# Regexp identifying subpackage directories in taipy hierarchy
+packages = "|".join(Package.NAMES)
+SUB_PACKAGE_DIR_PATTERN = re.compile(rf"taipy/(?:{packages})")
+
+
+# Filters files not to be copied
+def skip_path(path: str, package: Package, parent: str) -> bool:
+    path = path.replace("\\", "/")
+    if path.startswith("./"):
+        path = path[2:]
+    # Specific items per package
+    if (skip_items := SKIP_ITEMS.get(package.short_name, None)) and path in skip_items:
+        return True
+    # Taipy sub-package directories
+    if package.name == "taipy" and SUB_PACKAGE_DIR_PATTERN.fullmatch(path):
+        return True
+    # Others
+    if path.endswith("__pycache__") or path.startswith("build_"):
+        return True
+    return False
+
+
+def recursive_copy(package: Package, source, dest, *, parent: str = "", skip_root: bool = False):
+    dest_path = dest if skip_root else os.path.join(dest, os.path.basename(source))
+    if not skip_root:
+        os.makedirs(dest_path, exist_ok=True)
 
-    Path(_package_path).mkdir(parents=True, exist_ok=True)
+    for item in os.listdir(source):
+        src_item = os.path.join(source, item)
+        dest_item = os.path.join(dest_path, item)
+        if not skip_path(src_item, package, parent):
+            if os.path.isfile(src_item):
+                shutil.copy2(src_item, dest_item)
+            elif os.path.isdir(src_item):
+                if (s := src_item.replace("\\", "/")).startswith("./"):
+                    s = s[2:]
+                recursive_copy(package, src_item, dest_path, parent=s)
 
-    for file_name in os.listdir("."):
-        if file_name.lower().endswith(".md") or file_name in __SKIP:
-            continue
-        shutil.move(file_name, _package_path)
 
-    shutil.copy("../__init__.py", "./taipy/__init__.py")
+def main():
+    parser = argparse.ArgumentParser(
+        description="Creates the directory structure to build a Taipy package.",
+        formatter_class=argparse.RawTextHelpFormatter,
+    )
+    parser.add_argument(
+        "package",
+        type=Package.check_argument,
+        action="store",
+        help="""The name of the package to setup the build version for.
+This must be the short name of a Taipy package (common, core...) or 'taipy'.
+""",
+    )
+    parser.add_argument("version", type=Version.check_argument, action="store", help="Version of the package to build.")
+    args = parser.parse_args()
+    package = Package(args.package)
+
+    if package.name == "taipy":
+        # Check that gui_core bundle was built
+        if not os.path.exists("taipy/gui_core/lib/taipy-gui-core.js"):
+            raise SystemError("Taipy GUI-Core bundle was not built")
+    elif package.name == "gui":
+        # Check that gui bundle was built
+        if not os.path.exists("taipy/gui/webapp/taipy-gui.js"):
+            raise SystemError("Taipy GUI bundle was not built")
+
+    # Create 'build_<package>' target directory and its subdirectory 'taipy' if needed
+    build_dir = Path(DEST_ROOT + package.short_name)
+    if build_dir.exists():
+        print(f"Removing legacy directory '{build_dir}'")  # noqa: T201
+        shutil.rmtree(build_dir)
+    dest_dir = build_dir
+    if package.name != "taipy":
+        dest_dir = build_dir / "taipy"
+    dest_dir.mkdir(parents=True, exist_ok=True)
+
+    # Copy the package build files from taipy[/package] to build_<package>/taipy
+    recursive_copy(package, "." if package.name == "taipy" else package.package_dir, dest_dir)
+
+    # This is needed for local builds (i.e. not in a Github workflow)
+    if package.name == "taipy":
+        # Needs the frontend build scripts
+        tools_dir = build_dir / "tools" / "frontend"
+        tools_dir.mkdir(parents=True, exist_ok=True)
+        shutil.copy2("tools/frontend/bundle_build.py", tools_dir)
+        # Copy the build files from tools/packages/taipy to build_taipy
+        recursive_copy(package, Path("tools") / "packages" / "taipy", build_dir, skip_root=True)
+    else:
+        build_package_dir = build_dir / package.package_dir
+        # Copy build files from package to build dir
+        for filename in BUILD_CP_FILES:
+            shutil.copy2(build_package_dir / filename, build_dir)
+        # Move build files from package to build dir
+        for filename in BUILD_MV_FILES:
+            shutil.move(build_package_dir / filename, build_dir)
+        # Copy the build files from tools/packages/taipy-<package> to build_<package>
+        recursive_copy(package, Path("tools") / "packages" / f"taipy-{package.short_name}", build_dir, skip_root=True)
+
+    # Check that versions were set in setup.requirements.txt
+    with open(build_dir / "setup.requirements.txt") as requirements_file:
+        for line in requirements_file:
+            if match := re.fullmatch(r"(taipy\-\w+)(.*)", line.strip()):
+                if not match[2]:  # Version not updated
+                    print(f"setup.requirements.txt not up-to-date in 'tools/packages/{package.short_name}'.")  # noqa: T201
+                    raise SystemError(f"Version for dependency on {match[1]} is missing.")
+    # Update package's version.json
+    with open(build_dir / package.package_dir / "version.json", "w") as version_file:
+        json.dump(args.version.to_dict(), version_file)
+
+    # Copy topmost __init__
+    if package.name != "taipy":
+        shutil.copy2(Path("taipy") / "__init__.py", dest_dir)
+
+
+if __name__ == "__main__":
+    main()
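
A minimal local usage sketch, assuming the working directory is the checkout root; the package name and version below are illustrative only:

import subprocess

subprocess.run(
    ["python", "tools/release/build_package_structure.py", "gui", "4.1.0.dev2"],
    check=True,
)
# Expected result: a 'build_gui/' directory holding the taipy/gui sources,
# the build files from tools/packages/taipy-gui, and a taipy/gui/version.json
# rewritten to 4.1.0.dev2.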

+ 47 - 0
tools/release/bump_patch_version.py

@@ -0,0 +1,47 @@
+# Copyright 2021-2025 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+# --------------------------------------------------------------------------------------------------
+# Increments the patch version number in all the packages' version.json file.
+#
+# Invoked from the workflow in build-and-release.yml when releasing production packages.
+# --------------------------------------------------------------------------------------------------
+
+import argparse
+
+from common import Package, Version
+
+
+def main():
+    parser = argparse.ArgumentParser(description="Increments the patch version number of a package.",
+                                     formatter_class=argparse.RawTextHelpFormatter)
+    # <package> argument
+    def _check_package(value: str) -> str:
+        n_value = value.lower()
+        if n_value in Package.names(True) or n_value == "all":
+            return n_value
+        raise argparse.ArgumentTypeError(f"'{value}' is not a valid Taipy package name.")
+    parser.add_argument("package",
+                        type=_check_package,
+                        action="store",  help="""The name of the package to increment the patch version number.
+This should be the short name of a Taipy package (common, core...) or 'taipy'.
+It can also be set to 'all', in which case all packages are impacted.
+""")
+    args = parser.parse_args()
+
+    for package_name in [args.package] if args.package != "all" else Package.names(True):
+        package = Package(package_name)
+        version = package.load_version()
+        if version.ext:
+            raise ValueError(f"Package version for '{package.name}' has an extension ({version.full_name}).")
+        package.save_version(Version(version.major, version.minor, version.patch + 1))
+
+if __name__ == "__main__":
+    main()
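
A hedged illustration of the effect on a single package, using the helpers from common.py; the starting version is hypothetical:

from common import Package, Version

package = Package("core")
version = package.load_version()   # say this returns Version(4, 1, 2)
if version.ext:
    raise ValueError("Only extension-less versions can be bumped.")
package.save_version(Version(version.major, version.minor, version.patch + 1))
# taipy/core/version.json now contains {"major": 4, "minor": 1, "patch": 3}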

+ 0 - 97
tools/release/bump_version.py

@@ -1,97 +0,0 @@
-# Copyright 2021-2025 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import json
-import os
-import re
-from dataclasses import asdict, dataclass
-from typing import Optional
-
-
-@dataclass
-class Version:
-    major: str
-    minor: str
-    patch: str
-    ext: Optional[str] = None
-
-    def bump_ext_version(self) -> None:
-        if not self.ext:
-            return
-        reg = re.compile(r"[0-9]+$")
-        num = reg.findall(self.ext)[0]
-
-        self.ext = self.ext.replace(num, str(int(num) + 1))
-
-    def validate_suffix(self, suffix="dev"):
-        if suffix not in self.ext:
-            raise Exception(f"Version does not contain suffix {suffix}")
-
-    @property
-    def name(self) -> str:
-        """returns a string representation of a version"""
-        return f"{self.major}.{self.minor}.{self.patch}"
-
-    @property
-    def dev_name(self) -> str:
-        """returns a string representation of a version"""
-        return f"{self.name}.{self.ext}"
-
-    def __str__(self) -> str:
-        """returns a string representation of a version"""
-        version_str = f"{self.major}.{self.minor}.{self.patch}"
-        if self.ext:
-            version_str = f"{version_str}.{self.ext}"
-        return version_str
-
-
-def __load_version_from_path(base_path: str) -> Version:
-    """Load version.json file from base path."""
-    with open(os.path.join(base_path, "version.json")) as version_file:
-        data = json.load(version_file)
-        return Version(**data)
-
-
-def __write_version_to_path(base_path: str, version: Version) -> None:
-    with open(os.path.join(base_path, "version.json"), "w") as version_file:
-        json.dump(asdict(version), version_file)
-
-
-def extract_version(base_path: str) -> Version:
-    """
-    Load version.json file from base path and return the version string.
-    """
-    return __load_version_from_path(base_path)
-
-
-def bump_ext_version(version: Version, _base_path: str) -> None:
-    version.bump_ext_version()
-    __write_version_to_path(_base_path, version)
-
-
-
-if __name__ == "__main__":
-    paths = (
-         [
-            f"taipy{os.sep}common",
-            f"taipy{os.sep}core",
-            f"taipy{os.sep}rest",
-            f"taipy{os.sep}gui",
-            f"taipy{os.sep}templates",
-            "taipy",
-        ]
-    )
-
-    for _path in paths:
-        _version = extract_version(_path)
-        bump_ext_version(_version, _path)
-    print(f"NEW_VERSION={_version.dev_name}") # noqa T201 # type: ignore[reportPossiblyUnboundVariable]
-

+ 421 - 0
tools/release/common.py

@@ -0,0 +1,421 @@
+# Copyright 2021-2025 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+# --------------------------------------------------------------------------------------------------
+# Common artifacts used by the other scripts located in this directory.
+# --------------------------------------------------------------------------------------------------
+import argparse
+import json
+import os
+import re
+import subprocess
+import typing as t
+from dataclasses import asdict, dataclass
+from datetime import datetime
+from pathlib import Path
+from functools import total_ordering
+import requests
+
+
+# --------------------------------------------------------------------------------------------------
+@total_ordering
+@dataclass(frozen=True)
+class Version:
+    """Helps manipulate version numbers."""
+
+    major: int
+    minor: int
+    patch: int = 0
+    ext: t.Optional[str] = None
+
+    # Matching level
+    MAJOR: t.ClassVar[int] = 1
+    MINOR: t.ClassVar[int] = 2
+    PATCH: t.ClassVar[int] = 3
+
+    # Unknown version constant
+    UNKNOWN: t.ClassVar["Version"]
+
+    @property
+    def name(self) -> str:
+        """Returns a string representation of this Version without the extension part."""
+        return f"{self.major}.{self.minor}.{self.patch}"
+
+    @property
+    def full_name(self) -> str:
+        """Returns a full string representation of this Version."""
+        return f"{self.name}.{self.ext}" if self.ext else self.name
+
+    def __str__(self) -> str:
+        """Returns a string representation of this version."""
+        return self.full_name
+
+    def __repr__(self) -> str:
+        """Returns a full string representation of this version."""
+        ext = f".{self.ext}" if self.ext else ""
+        return f"Version({self.major}.{self.minor}.{self.patch}{ext})"
+
+    @classmethod
+    def from_string(cls, version: str):
+        """Creates a Version from a string.
+
+        Parameters:
+            version: a version name as a string.<br/>
+              The format should be "<major>.<minor>[.<patch>[.<extension>]]" where
+
+              - <major> must be a number, indicating the major number of the version
+              - <minor> must be a number, indicating the minor number of the version
+              - <patch> must be a number, indicating the patch level of the version. Optional.
+              - <extension> must be a string. It is common practice that <extension> ends with a
+                number, but it is not required. Optional.
+        Returns:
+            A new Version object with the appropriate values that were parsed.
+        """
+        match = re.fullmatch(r"(\d+)\.(\d+)(?:\.(\d+))?(?:\.([^\s]+))?", version)
+        if match:
+            major = int(match[1])
+            minor = int(match[2])
+            patch = int(match[3]) if match[3] else 0
+            ext = match[4]
+            return cls(major=major, minor=minor, patch=patch, ext=ext)
+        else:
+            raise ValueError(f"String not in expected format: {version}")
+
+    def to_dict(self) -> dict[str, str]:
+        """Returns this Version as a dictionary."""
+        return {k: v for k, v in asdict(self).items() if v is not None}
+
+    @staticmethod
+    def check_argument(value: str) -> "Version":
+        """Checks version parameter in an argparse context."""
+        try:
+            version = Version.from_string(value)
+        except Exception as e:
+            raise argparse.ArgumentTypeError(f"'{value}' is not a valid version number.") from e
+        return version
+
+    def validate_extension(self, ext="dev"):
+        """Returns True if the extension part of this Version is the one queried."""
+        return self.split_ext()[0] == ext
+
+    def split_ext(self) -> t.Tuple[str, int]:
+        """Splits extension into the (identifier, index) tuple
+
+        Returns:
+            ("", -1) if there is no extension.
+            (extension, -1) if there is no extension index.
+            (extension, index) if there is an extension index (e.g. "dev3").
+        """
+        if not self.ext or (match := re.fullmatch(r"(.*?)(\d+)?", self.ext)) is None:
+            return ("", -1)  # No extension
+        # Potentially no index
+        return (match[1], int(match[2]) if match[2] else -1)
+
+    def is_compatible(self, version: "Version") -> bool:
+        """Checks if this version is compatible with another.
+
+        Version v1 is defined as being compatible with version v2 if a package built with version v1
+        can safely depend on another package built with version v2.<br/>
+        Here are the conditions set when checking whether v1 is compatible with v2:
+
+        - If v1 and v2 have different major or minor numbers, they are not compatible.
+        - If v1 has no extension, it is compatible only with a v2 that has no extension.
+        - If v1 has an extension, it is compatible with any v2 that has the same extension, no
+          matter the extension index, as well as with any v2 that has no extension.
+
+        I.e.:
+            package-1.[m].[t] is NOT compatible with any sub-package-[M].* where M != 1
+            package-1.2.[t] is NOT compatible with any sub-package-1.[m].* where m != 2
+            package-1.2.[t] is compatible with all sub-package-1.2.*
+            package-1.2.[t].ext[X] is compatible with all sub-package-1.2.*.ext*
+            package-1.2.3 is NOT compatible with any sub-package-1.2.*.*
+            package-1.2.3.extA is NOT compatible with any sub-package-1.2.*.extB if extA != extB,
+               independently of a potential extension index.
+
+        Parameters:
+            version: the version to check compatibility against.
+
+        Returns:
+            True if this Version is compatible with *version* and False if it is not.
+        """
+        if self.major != version.major or self.minor != version.minor:
+            return False
+        if self.patch > version.patch:
+            return True
+
+        # No extensions on either → Compatible
+        if not self.ext and not version.ext:
+            return True
+
+        # self has extension, version doesn't → Compatible
+        if self.ext and not version.ext:
+            return True
+
+        # Version has extension, self doesn't → Not compatible
+        if not self.ext and version.ext:
+            return False
+
+        # Both have extensions → check identifiers. Dissimilar identifiers → Not compatible
+        self_prefix, _ = self.split_ext()
+        other_prefix, _ = version.split_ext()
+        if self_prefix != other_prefix:
+            return False
+
+        # Same identifiers → Compatible
+        return True
+
+    def matches(self, version: "Version", level: int = PATCH) -> bool:
+        """Checks whether this version matches another, up to some level.
+
+        Arguments:
+            version: The version to check against.
+            level: The level of precision for the match:
+            - Version.MAJOR: compare only the major version;
+            - Version.MINOR: compare major and minor versions;
+            - Version.PATCH: compare major, minor, and patch versions.
+
+        Returns:
+            True if the versions match up to the given level, False otherwise.
+        """
+        if self.major != version.major:
+            return False
+        if level >= self.MINOR and self.minor != version.minor:
+            return False
+        if level >= self.PATCH and self.patch != version.patch:
+            return False
+        return True
+
+    def __lt__(self, other: "Version") -> bool:
+        if not isinstance(other, Version):
+            return NotImplemented
+
+        # Compare major, minor, patch
+        self_tuple = (self.major, self.minor, self.patch)
+        other_tuple = (other.major, other.minor, other.patch)
+        if self_tuple != other_tuple:
+            return self_tuple < other_tuple
+
+        # Same version number, now compare extensions
+        return self._ext_sort_key() < other._ext_sort_key()
+
+    def _ext_sort_key(self) -> t.Tuple[int, str, int]:
+        """
+        Defines ordering for extensions.
+        Final versions (None) are considered greater than prereleases.
+
+        Example sort order:
+        1.0.0.dev1 < 1.0.0.rc1 < 1.0.0 < 1.0.1
+        """
+        if self.ext is None:
+            return (2, "", 0)  # Final release — highest priority
+
+        # Parse extension like "dev1" into prefix + number
+        match = re.match(r"([a-zA-Z]+)(\d*)", self.ext)
+        if match:
+            label, num = match.groups()
+            num_val = int(num) if num else 0
+            return (1, label, num_val)  # Pre-release
+        else:
+            return (0, self.ext, 0)  # Unknown extension format — lowest priority
+
+
+Version.UNKNOWN = Version(0, 0)
+
+
+# --------------------------------------------------------------------------------------------------
+class Package:
+    """Information on any Taipy package and sub-package."""
+
+    # Base names of the sub packages taipy-*
+    # They also are the names of the directory where their code belongs, under the 'taipy' directory,
+    # in the root of the Taipy repository.
+    # Order is important: package that are dependent of others must appear first.
+    NAMES = ["common", "core", "gui", "rest", "templates"]
+
+    _packages = {}
+
+    def __new__(cls, name: str) -> "Package":
+        if name.startswith("taipy-"):
+            name = name[6:]
+        if name in cls._packages:
+            return cls._packages[name]
+        package = super().__new__(cls)
+        cls._packages[name] = package
+        return package
+
+    def __init__(self, package: str) -> None:
+        self._name = package
+        if package == "taipy":
+            self._short = package
+        else:
+            if package.startswith("taipy-"):
+                self._short = package[6:]
+            else:
+                self._name = f"taipy-{package}"
+                self._short = package
+            if self._short not in Package.NAMES:
+                raise ValueError(f"Invalid package name '{package}'.")
+
+    @classmethod
+    def names(cls, add_taipy=False) -> list[str]:
+        return cls.NAMES + (["taipy"] if add_taipy else [])
+
+    @staticmethod
+    def check_argument(value: str) -> str:
+        """Checks package parameter in an argparse context."""
+        n_value = value.lower()
+        if n_value in Package.names(True) or value == "all":
+            return n_value
+        raise argparse.ArgumentTypeError(f"'{value}' is not a valid Taipy package name.")
+
+    @property
+    def name(self) -> str:
+        """The full package name."""
+        return self._name
+
+    @property
+    def short_name(self) -> str:
+        """The short package name."""
+        return self._short
+
+    @property
+    def package_dir(self) -> str:
+        return "taipy" if self._name == "taipy" else os.path.join("taipy", self._short)
+
+    def load_version(self) -> Version:
+        """
+        Returns the Version defined in this package's version.json content.
+        """
+        with open(Path(self.package_dir) / "version.json") as version_file:
+            data = json.load(version_file)
+            return Version(**data)
+
+    def save_version(self, version: Version) -> None:
+        """
+        Saves the Version to this package's version.json file.
+        """
+        with open(os.path.join(Path(self.package_dir), "version.json"), "w") as version_file:
+            json.dump(version.to_dict(), version_file)
+
+    def __str__(self) -> str:
+        """Returns a string representation of this package."""
+        return self.name
+
+    def __repr__(self) -> str:
+        """Returns a full string representation of this package."""
+        return f"Package({self.name})"
+
+    def __eq__(self, other):
+        return isinstance(other, Package) and self._short == other._short
+
+    def __hash__(self):
+        return hash(self._short)
+
+
+# --------------------------------------------------------------------------------------------------
+def run_command(*args) -> str:
+    return subprocess.run(args, stdout=subprocess.PIPE, text=True, check=True).stdout.strip()
+
+
+# --------------------------------------------------------------------------------------------------
+class Git:
+    @staticmethod
+    def get_current_branch() -> str:
+        return run_command("git", "branch", "--show-current")
+
+    @staticmethod
+    def get_github_path() -> t.Optional[str]:
+        """Retrieve current Git path (<owner>/<repo>)."""
+        branch_name = Git.get_current_branch()
+        remote_name = run_command("git", "config", f"branch.{branch_name}.remote")
+        url = run_command("git", "remote", "get-url", remote_name)
+        if match := re.fullmatch(r"(?:git@github\.com:|https://github\.com/)(.*)\.git", url):
+            return match[1]
+        print("ERROR - Could not retrieve GibHub branch path")  # noqa: T201
+        return None
+
+
+# --------------------------------------------------------------------------------------------------
+class Release(t.TypedDict):
+    version: Version
+    id: str
+    tag: str
+    published_at: str
+
+
+def fetch_github_releases(gh_path: t.Optional[str] = None) -> dict[Package, list[Release]]:
+    # Retrieve all available releases (potentially paginating results) for all packages.
+    # Returns a dictionary of package_short_name/list-of-releases pairs.
+    # A 'release' is a dictionary where "version" is the package version, "id" is the release id and
+    # "tag" is the release tag name.
+    headers = {"Accept": "application/vnd.github+json"}
+    all_releases: dict[str, list[Release]] = {}
+    if gh_path is None:
+        gh_path = Git.get_github_path()
+        if gh_path is None:
+            raise ValueError("Couldn't figure out GitHub branch path.")
+    url = f"https://api.github.com/repos/{gh_path}/releases"
+    page = 1
+    # Read all release versions and store them in a package_name - list[Version] dictionary
+    while url:
+        response = requests.get(url, params={"per_page": 50, "page": page}, headers=headers)
+        response.raise_for_status()  # Raise error for bad responses
+        for release in response.json():
+            release_id = release["id"]
+            tag = release["tag_name"]
+            published_at = release["published_at"]
+            pkg_ver, pkg = tag.split("-") if "-" in tag else (tag, "taipy")
+            # Drop legacy packages (config...)
+            if pkg != "taipy" and pkg not in Package.NAMES:
+                continue
+
+            # Exception for legacy version: v1.0.0 -> 1.0.0
+            if pkg_ver == "v1.0.0":
+                pkg_ver = pkg_ver[1:]
+            version = Version.from_string(pkg_ver)
+            new_release: Release = {"version": version, "id": release_id, "tag": tag, "published_at": published_at}
+            if releases := all_releases.get(pkg):
+                releases.append(new_release)
+            else:
+                all_releases[pkg] = [new_release]
+
+        # Check for pagination in the `Link` header
+        link_header = response.headers.get("Link", "")
+        if 'rel="next"' in link_header:
+            url = link_header.split(";")[0].strip("<>")  # Extract next page URL
+            page += 1
+        else:
+            url = None  # No more pages
+
+    # Sort all releases for all packages by publishing date (most recent first)
+    for p in all_releases.keys():
+        all_releases[p].sort(
+            key=lambda r: datetime.fromisoformat(r["published_at"].replace("Z", "+00:00")), reverse=True
+        )
+    # Build and return the dictionary using Package instances
+    return {Package(p): v for p, v in all_releases.items()}
+
+
+# --------------------------------------------------------------------------------------------------
+def fetch_latest_github_taipy_releases(
+    all_releases: t.Optional[dict[Package, list[Release]]] = None, gh_path: t.Optional[str] = None
+) -> Version:
+    # Retrieve all available releases if necessary
+    if all_releases is None:
+        all_releases = fetch_github_releases(gh_path)
+    # Find the latest 'taipy' version that has no extension
+    latest_taipy_version = Version.UNKNOWN
+    releases = all_releases.get(Package("taipy"))
+    if releases := all_releases.get(Package("taipy")):
+        # Retrieve all non-dev releases
+        versions = [release["version"] for release in releases if release["version"].ext is None]
+        # Find the latest
+        if versions:
+            latest_taipy_version = max(versions)
+    return latest_taipy_version
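
A short, hedged example of the Version and Package helpers defined above; all values are made up and nothing here calls the GitHub API:

from common import Package, Version

v = Version.from_string("4.1.0.dev3")
assert v.split_ext() == ("dev", 3)
assert v.validate_extension("dev")
assert v < Version.from_string("4.1.0")            # a final release sorts after its dev builds
assert v.is_compatible(Version.from_string("4.1.2.dev0"))

pkg = Package("taipy-core")
assert pkg.short_name == "core" and pkg.name == "taipy-core"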

+ 98 - 0
tools/release/delete_dev_releases.py

@@ -0,0 +1,98 @@
+# Copyright 2021-2025 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+# --------------------------------------------------------------------------------------------------
+# Deletes dev releases and tags for a specific version from a GitHub repository.
+# --------------------------------------------------------------------------------------------------
+
+import argparse
+
+import requests
+from common import Git, Version, fetch_github_releases
+
+
+def main(arg_strings=None):
+    parser = argparse.ArgumentParser(
+        description="Deletes Taipy package dev releases and tags from GitHub.",
+        formatter_class=argparse.RawTextHelpFormatter,
+    )
+    parser.add_argument(
+        "version",
+        action="store",
+        type=Version.check_argument,
+        help="""The version (M.m.p) of the releases to be deleted.
+The indicated version must not have extensions.""",
+    )
+
+    def _check_repository_path(value: str):
+        if len(value.split("/")) != 2:
+            raise argparse.ArgumentTypeError(f"'{value}' is not a valid '<owner>/<repo>' path.")
+        return value
+
+    parser.add_argument(
+        "-r",
+        "--repository_path",
+        type=_check_repository_path,
+        help="""The '<owner>/<repo>' string that identifies the repository where releases are fetched.
+The default is the current repository.""",
+    )
+    parser.add_argument(
+        "-y",
+        "--yes",
+        action="store_true",
+        help="""Do not ask for confirmation of the deletion of the releases and tags.""",
+    )
+    args = parser.parse_args(arg_strings)
+
+    headers = {"Accept": "application/vnd.github+json"}
+    repository_path = args.repository_path if args.repository_path else Git.get_github_path()
+    all_releases = fetch_github_releases(repository_path)
+    found = False
+    if all_releases:
+        for package, releases in all_releases.items():
+            for release in releases:
+                release_version = release["version"]
+                release_id = release["id"]
+                release_tag = release["tag"]
+                if release_version.validate_extension() and args.version.matches(release_version):
+                    found = True
+                    confirm = args.yes
+                    if not args.yes:
+                        print(f"\n➡️ Release: package: {package.name}, version: {release_version}")  # noqa: T201
+                        confirm = (
+                            input("❓ Do you want to delete this release and its tag? (y/N): ").strip().lower() != "y"
+                        )
+                    if confirm:
+                        # Delete release
+                        url = f"https://api.github.com/repos/{repository_path}/releases/{release_id}"
+                        response = requests.delete(url, headers=headers)
+                        if response.status_code == 204:
+                            print(f"✅ Successfully deleted release {release_version} for package '{package.name}'.")  # noqa: T201
+                        else:
+                            print(  # noqa: T201
+                                f"❌ Failed to delete release {release_version} for package '{package.name}':"
+                                + f" {response.status_code} - {response.text}"
+                            )
+                        # Delete tag
+                        url = f"https://api.github.com/repos/{repository_path}/git/refs/tags/{release_tag}'"
+                        response = requests.delete(url, headers=headers)
+                        if response.status_code == 204:
+                            print(f"✅ Successfully deleted tag {release_tag}.")  # noqa: T201
+                        else:
+                            print(f"❌ Failed to delete tag {release_tag}: {response.status_code} - {response.text}")  # noqa: T201
+                    else:
+                        print("ℹ️ Skipped.")  # noqa: T201
+
+    if not found:
+        print(f"No dev releases found for version {args.version}.")  # noqa: T201
+
+
+if __name__ == "__main__":
+    main()
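
A hedged usage sketch; the repository path and version below are placeholders, and the call reaches the GitHub API:

from delete_dev_releases import main

# Delete every dev release (and its tag) published for version 4.1.0,
# without asking for confirmation:
main(["4.1.0", "--repository_path", "my-org/taipy", "--yes"])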

+ 0 - 36
tools/release/delete_dev_releases.sh

@@ -1,36 +0,0 @@
-#!/bin/bash
-
-# Set the repository (format: owner/repo)
-REPO="Avaiga/taipy"
-
-# Get the list of pre-releases
-pre_releases=$(gh release list --repo "$REPO" --json tagName,isPrerelease --jq '.[] | select(.isPrerelease == true) | .tagName')
-
-# If there are no pre-releases, exit
-if [ -z "$pre_releases" ]; then
-    echo "No pre-releases found."
-    exit 0
-fi
-
-# Get the latest pre-release tag
-latest_pre_release=$(echo "$pre_releases" | head -n 1)
-
-# Prepare a list of releases to delete
-to_delete=()
-
-# Identify pre-releases to delete
-for tag in $pre_releases; do
-    if [[ "$tag" == *"$latest_pre_release"* ]]; then
-      echo "Latest release found! Skipping"
-    else
-      to_delete+=("$tag")
-    fi
-done
-
-# Delete the releases
-for tag in "${to_delete[@]}"; do
-    echo "Deleting pre-release with tag: $tag"
-    gh release delete "$tag" --repo "$REPO" --yes ----cleanup-tag
-done
-
-echo "Kept all pre-releases matching version: $latest_pre_release"

+ 0 - 80
tools/release/fetch_latest_versions.py

@@ -1,80 +0,0 @@
-# Copyright 2021-2025 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import sys
-
-import requests  # type: ignore
-
-
-def fetch_latest_releases_from_github(dev=False, target_version="", target_package=""):
-    releases = {}
-    url = "https://api.github.com/repos/Avaiga/taipy/releases"
-    response = requests.get(url)
-    resp_json = response.json()
-
-    for rel in resp_json:
-        tag = rel["tag_name"]
-
-        if not dev and ".dev" in tag:
-            continue
-        if "common" in tag:
-            releases["common"] = releases.get("common") or tag.split("-")[0]
-        elif "core" in tag:
-            releases["core"] = releases.get("core") or tag.split("-")[0]
-        elif "gui" in tag:
-            releases["gui"] = releases.get("gui") or tag.split("-")[0]
-        elif "rest" in tag:
-            releases["rest"] = releases.get("rest") or tag.split("-")[0]
-        elif "templates" in tag:
-            releases["templates"] = releases.get("templates") or tag.split("-")[0]
-        elif "-" not in tag:
-            releases["taipy"] = releases.get("taipy") or tag
-    releases[target_package] = target_version
-    return releases
-
-
-def fetch_latest_releases_from_pypi(dev=False, target_version="", target_package=""):
-    releases = {}
-
-    for pkg in ["common", "core", "gui", "rest", "templates"]:
-        url = f"https://pypi.org/pypi/taipy-{pkg}/json"
-        response = requests.get(url)
-        resp_json = response.json()
-        versions = list(resp_json["releases"].keys())
-        versions.reverse()
-
-        for ver in versions:
-            if not dev and ".dev" in ver:
-                continue
-            releases[pkg] = ver
-            break
-    releases[target_package] = target_version
-    return releases
-
-
-if __name__ == "__main__":
-    is_dev_version = sys.argv[1] == "dev"
-    is_pypi = sys.argv[2] == "true"
-    target_version = sys.argv[3]
-    target_package = sys.argv[4]
-
-    if is_dev_version and ".dev" not in target_version:
-        raise Exception("Version does not contain suffix .dev")
-
-    versions = {}
-
-    if not is_pypi:
-        versions = fetch_latest_releases_from_github(is_dev_version, target_version, target_package)
-    else:
-        versions = fetch_latest_releases_from_pypi(is_dev_version, target_version, target_package)
-
-    for name, version in versions.items():
-        print(f"{name}_VERSION={version}")  # noqa: T201

+ 0 - 97
tools/release/setup_project.py

@@ -1,97 +0,0 @@
-# Copyright 2021-2025 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import json
-import os
-import platform
-import re
-import subprocess
-import sys
-from pathlib import Path
-
-import toml  # type: ignore
-
-
-def get_requirements(pkg: str, env: str = "dev") -> list:
-    # get requirements from the different setups in tools/packages (removing taipy packages)
-    reqs = set()
-    pkg_name = pkg if pkg == "taipy" else f"taipy-{pkg}"
-    root_folder = Path(__file__).parent
-    package_path = os.path.join(root_folder.parent, "packages", pkg_name)
-    requirements_file = os.path.join(package_path, "setup.requirements.txt")
-    if os.path.exists(requirements_file):
-        reqs.update(Path(requirements_file).read_text("UTF-8").splitlines())
-    if env == "dev":
-        return [r for r in reqs if r and not r.startswith("taipy")]
-    return list(reqs)
-
-
-def update_pyproject(version_path: str, pyproject_path: str, env: str = "dev"):
-    with open(version_path) as version_file:
-        version = json.load(version_file)
-        version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}'
-        if vext := version.get("ext"):
-            version_string = f"{version_string}.{vext}"
-
-    pyproject_data = toml.load(pyproject_path)
-    pyproject_data["project"]["version"] = version_string
-    pyproject_data["project"]["urls"]["Release notes"] = f"https://docs.taipy.io/en/release-{version_string}/relnotes/"
-    pyproject_data["project"]["dependencies"] = get_requirements(get_pkg_name(pyproject_path), env)
-
-    with open(pyproject_path, "w", encoding="utf-8") as pyproject_file:
-        toml.dump(pyproject_data, pyproject_file)
-
-
-def _build_webapp(webapp_path: str):
-    already_exists = Path(webapp_path).exists()
-    if not already_exists:
-        os.system("cd ../../frontend/taipy-gui/dom && npm ci")
-        os.system("cd ../../frontend/taipy-gui && npm ci && npm run build")
-
-
-def get_pkg_name(path: str) -> str:
-    # The regex pattern
-    pattern = r"([^/\\]+)[/\\]pyproject\.toml$"
-
-    # Search for the pattern
-    match = re.search(pattern, os.path.abspath(path))
-    if not match:
-        raise ValueError(f"Could not find package name in path: {path}")
-    return match.group(1)
-
-
-if __name__ == "__main__":
-    _pyproject_path = os.path.join(sys.argv[1], "pyproject.toml")
-    try:
-        env = sys.argv[2]
-    except IndexError:
-        env = "dev"
-
-    pkg = get_pkg_name(_pyproject_path)
-    if pkg == "taipy":
-        _version_path = os.path.join(sys.argv[1], "taipy", "version.json")
-        _webapp_path = os.path.join(sys.argv[1], "taipy", "gui", "webapp", "index.html")
-    else:
-        _version_path = os.path.join(sys.argv[1], "version.json")
-        _webapp_path = os.path.join(sys.argv[1], "webapp", "index.html")
-
-    update_pyproject(_version_path, _pyproject_path, env)
-
-    if pkg == "gui":
-        _build_webapp(_webapp_path)
-
-    if pkg == "taipy":
-        subprocess.run(
-            ["python", "bundle_build.py"],
-            cwd=os.path.join("tools", "frontend"),
-            check=True,
-            shell=platform.system() == "Windows",
-        )

+ 0 - 125
tools/release/setup_version.py

@@ -1,125 +0,0 @@
-# Copyright 2021-2025 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import json
-import os
-import re
-import sys
-from dataclasses import asdict, dataclass
-from typing import Optional
-
-
-@dataclass
-class Version:
-    major: str
-    minor: str
-    patch: str
-    ext: Optional[str] = None
-
-    def bump_ext_version(self) -> None:
-        if not self.ext:
-            return
-        reg = re.compile(r"[0-9]+$")
-        num = reg.findall(self.ext)[0]
-
-        self.ext = self.ext.replace(num, str(int(num) + 1))
-
-    def validate_suffix(self, suffix="dev"):
-        if suffix not in self.ext:
-            raise Exception(f"Version does not contain suffix {suffix}")
-
-    @property
-    def name(self) -> str:
-        """returns a string representation of a version"""
-        return f"{self.major}.{self.minor}.{self.patch}"
-
-    @property
-    def dev_name(self) -> str:
-        """returns a string representation of a version"""
-        return f"{self.name}.{self.ext}"
-
-    def __str__(self) -> str:
-        """returns a string representation of a version"""
-        version_str = f"{self.major}.{self.minor}.{self.patch}"
-        if self.ext:
-            version_str = f"{version_str}.{self.ext}"
-        return version_str
-
-
-def __load_version_from_path(base_path: str) -> Version:
-    """Load version.json file from base path."""
-    with open(os.path.join(base_path, "version.json")) as version_file:
-        data = json.load(version_file)
-        return Version(**data)
-
-
-def __write_version_to_path(base_path: str, version: Version) -> None:
-    with open(os.path.join(base_path, "version.json"), "w") as version_file:
-        json.dump(asdict(version), version_file)
-
-
-def extract_version(base_path: str) -> Version:
-    """
-    Load version.json file from base path and return the version string.
-    """
-    return __load_version_from_path(base_path)
-
-
-def __setup_dev_version(version: Version, _base_path: str, name: Optional[str] = None) -> None:
-    version.validate_suffix()
-
-    name = f"{name}_VERSION" if name else "VERSION"
-
-    print(f"{name}={version.dev_name}")  # noqa: T201
-
-
-def bump_ext_version(version: Version, _base_path: str) -> None:
-    version.bump_ext_version()
-    __write_version_to_path(_base_path, version)
-
-
-def __setup_prod_version(version: Version, target_version: str, branch_name: str, name: str = None) -> None:
-    if str(version) != target_version:
-        raise ValueError(f"Current version={version} does not match target version={target_version}")
-
-    if target_branch_name := f"release/{version.major}.{version.minor}" != branch_name:
-        raise ValueError(
-            f"Branch name mismatch branch={branch_name} does not match target branch name={target_branch_name}"
-        )
-
-    name = f"{name}_VERSION" if name else "VERSION"
-    print(f"{name}={version.name}")  # noqa: T201
-
-
-if __name__ == "__main__":
-    paths = (
-        [sys.argv[1]]
-        if sys.argv[1] != "ALL"
-        else [
-            f"taipy{os.sep}common",
-            f"taipy{os.sep}core",
-            f"taipy{os.sep}rest",
-            f"taipy{os.sep}gui",
-            f"taipy{os.sep}templates",
-            "taipy",
-        ]
-    )
-    _environment = sys.argv[2]
-
-    for _path in paths:
-        _version = extract_version(_path)
-        _name = None if _path == "taipy" else _path.split(os.sep)[-1]
-
-        if _environment == "dev":
-            __setup_dev_version(_version, _path, _name)
-
-        if _environment == "production":
-            __setup_prod_version(_version, sys.argv[3], sys.argv[4], _name)

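Both the removed helper above and the new tooling below work from the same version.json layout (major, minor, patch, plus an optional ext suffix). A minimal sketch of how such a file translates into the version strings used throughout these scripts; the numbers and the 'dev2' suffix are illustrative only:

import json

# Illustrative version.json content; the actual values are hypothetical.
sample = '{"major": 4, "minor": 1, "patch": 0, "ext": "dev2"}'
fields = json.loads(sample)

name = f'{fields["major"]}.{fields["minor"]}.{fields["patch"]}'        # plain release name: "4.1.0"
full_name = f'{name}.{fields["ext"]}' if fields.get("ext") else name   # dev release name: "4.1.0.dev2"
print(name, full_name)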
+ 192 - 0
tools/release/setup_versions.py

@@ -0,0 +1,192 @@
+# Copyright 2021-2025 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+# --------------------------------------------------------------------------------------------------
+# Computes the release version of each package to build and checks that it matches the version
+# declared in the package's version.json file.
+#
+# Invoked from the workflow in build-and-release.yml.
+#
+# Outputs a line for each package (all packages if 'all'):
+#   <package_short_name>_VERSION=<release_version>
+#      the release version of the package that gets built.
+#      - if 'dev' release mode, that would be M.m.p.dev<x>
+#        where dev<x> is the first dev suffix that has not been released yet for that version.
+#      - if 'production' release mode, that would be M.m.p, as read in the package's version.json
+#        file.
+# If a 'production' release mode is requested, a similar line is issued indicating the next patch
+# version number:
+#   NEXT_<package_short_name>_VERSION=<next_release_version>
+# --------------------------------------------------------------------------------------------------
+
+import argparse
+import os
+
+from common import Git, Package, Version, fetch_github_releases, fetch_latest_github_taipy_releases
+
+
+def __setup_dev_version(
+    package: Package, version: Version, released_versions: list[Version], target_version: dict[str, Version]
+) -> None:
+    # Find latest dev release for that version
+    matching_versions = [v for v in released_versions if v.matches(version) and v.validate_extension()]
+    latest_version = max(matching_versions) if matching_versions else None
+    ext, ext_index = ("dev", 0)
+    if latest_version:
+        ext, ext_index = latest_version.split_ext()
+        ext_index += 1
+    target_version[package.short_name] = Version(version.major, version.minor, version.patch, f"{ext}{ext_index}")
+
+
+def __setup_prod_version(
+    package: Package,
+    version: Version,
+    branch_name: str,
+    target_versions: dict[str, Version],
+    next_versions: dict[str, Version],
+) -> None:
+    # Production releases can only be performed from a release branch
+    if (os.environ.get("GITHUB_ACTIONS") == "true") and (
+        target_branch_name := f"release/{version.major}.{version.minor}"
+    ) != branch_name:
+        raise ValueError(f"Current branch '{branch_name}' does not match expected '{target_branch_name}'")
+    target_versions[package.short_name] = version
+    # Compute next patch version
+    next_versions[package.short_name] = Version(version.major, version.minor, version.patch + 1)
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Computes the Taipy package versions to be build.", formatter_class=argparse.RawTextHelpFormatter
+    )
+
+    # <package> argument
+    def _check_package(value: str) -> str:
+        n_value = value.lower()
+        if n_value in Package.names(True) or n_value == "all":
+            return n_value
+        raise argparse.ArgumentTypeError(f"'{value}' is not a valid Taipy package name.")
+
+    parser.add_argument(
+        "package",
+        type=_check_package,
+        action="store",
+        help="""The name of the package to setup the build version for.
+This should be the short name of a Taipy package (common, core...) or 'taipy'.
+It can also be set to 'ALL', in which case the versions of all packages are computed.
+""",
+    )
+
+    # <version> argument
+    parser.add_argument(
+        "-v",
+        "--version",
+        type=Version.check_argument,
+        required=True,
+        help="""Full name of the target version (M.m.p).
+This version must match the one in the package's 'version.json' file.
+""",
+    )
+    # <release_type> argument
+    parser.add_argument(
+        "-t",
+        "--release_type",
+        choices=["dev", "production"],
+        default="dev",
+        type=str.lower,
+        help="""Type of release to build (default: dev).
+
+If 'dev', the release version is computed from the existing released package versions
+in the repository:
+- If there is no release with version <version>, the release will have the version set
+  to <version>.dev0.
+- If there is a <version>.dev<n> release, the release will have the version <version>.dev<n+1>.
+- If there is a <version> release (with no 'dev' part), the script fails.
+
+If 'production', the release version is exactly <version>, which must not have been released yet,
+and the build must be performed from the matching release branch ('release/<M>.<m>').
+""",
+    )
+
+    # <repository_name> argument
+    def _check_repository_name(value: str) -> str:
+        if len(value.split("/")) != 2:
+            raise argparse.ArgumentTypeError(f"'{value}' is not a valid '<owner>/<repo>' pair.")
+        return value
+
+    parser.add_argument(
+        "-r",
+        "--repository_name",
+        type=_check_repository_name,
+        help="""The '<owner>/<repo>' string that identifies the repository where releases are fetched.
+The default is the current repository.""",
+    )
+    # <branch_name> argument
+    parser.add_argument(
+        "-b",
+        "--branch_name",
+        help="""The name of the branch to check package versions from."
+If <release_type> is 'production', this branch has to be a release branch ('release/*').
+This value is extracted from the current branch by default.
+        """,
+    )
+    args = parser.parse_args()
+
+    all_releases = fetch_github_releases(args.repository_name)
+    target_versions = {}
+    next_versions = {}
+    for package_name in Package.names(True):
+        package_releases = all_releases.get(Package(package_name))
+        released_versions = [release["version"] for release in package_releases] if package_releases else []
+        if args.release_type == "production":
+            released_versions = list(filter(lambda v: v.ext is None, released_versions))
+        else:
+            released_versions = list(filter(lambda v: v.ext is not None, released_versions))
+        # Matching versions
+        released_versions = [v for v in released_versions if v.matches(args.version, Version.MINOR)]
+        target_version = max(released_versions) if released_versions else None
+        target_versions[package_name] = target_version if target_version else Version.UNKNOWN
+
+    packages: list[str] = [args.package] if args.package != "all" else Package.names(True)
+    branch_name = args.branch_name if args.branch_name else Git.get_current_branch()
+
+    for package_name in packages:
+        package = Package(package_name)
+        version = package.load_version()
+        if version.ext:
+            raise ValueError(f"Package version for '{package.name}' has an extension ({version.full_name}).")
+        if version != args.version:
+            raise ValueError(
+                f"Target version ({args.version.full_name}) does not match version"
+                + f" {version.full_name} in package {package.name}."
+            )
+        package_releases = all_releases.get(package)
+        released_versions = [release["version"] for release in package_releases] if package_releases else []
+        if version in released_versions:
+            raise ValueError(f"{version} is already released for package {package.name}.")
+
+        if args.release_type == "dev":
+            __setup_dev_version(package, version, released_versions, target_versions)
+        else:
+            __setup_prod_version(package, version, branch_name, target_versions, next_versions)
+
+    for p, v in target_versions.items():
+        print(f"{p}_VERSION={v}")  # noqa: T201
+    if next_versions:
+        for p, v in next_versions.items():
+            print(f"NEXT_{p}_VERSION={v}")  # noqa: T201
+    # Print out the latest 'taipy' version that has no extension
+    print(f"LATEST_TAIPY_VERSION={fetch_latest_github_taipy_releases(all_releases)}")  # noqa: T201
+
+
+if __name__ == "__main__":
+    main()

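The script's only contract with the calling workflow is its stdout: one KEY=VALUE line per computed version, plus NEXT_*_VERSION lines for production builds and a final LATEST_TAIPY_VERSION line. A minimal sketch of how a caller could collect these values; the invocation arguments and the resulting versions are examples only:

import subprocess

# Hypothetical invocation: compute dev versions for all packages, target version 4.1.0.
result = subprocess.run(
    ["python", "tools/release/setup_versions.py", "all", "-v", "4.1.0", "-t", "dev"],
    capture_output=True, text=True, check=True,
)
# Collect the KEY=VALUE lines, e.g. {"gui_VERSION": "4.1.0.dev3", "LATEST_TAIPY_VERSION": "4.0.2", ...}
versions = dict(line.split("=", 1) for line in result.stdout.splitlines() if "=" in line)
print(versions)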
+ 0 - 43
tools/release/update_setup.py

@@ -1,43 +0,0 @@
-# Copyright 2021-2025 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-import sys
-
-
-def update_setup() -> None:
-    with open("setup.taipy.py", mode="r") as setup_r, open("setup.py", mode="w") as setup_w:
-        in_requirements = False
-        looking = True
-        for line in setup_r:
-            if looking:
-                if line.lstrip().startswith("requirements") and line.rstrip().endswith("["):
-                    in_requirements = True
-                elif in_requirements:
-                    if line.strip() == "]":
-                        looking = False
-                    else:
-                        if line.lstrip().startswith('"taipy-gui@git+https'):
-                            start = line.find('"taipy-gui')
-                            end = line.rstrip().find(",")
-                            line = f'{line[:start]}"taipy-gui=={sys.argv[1]}"{line[end:]}'
-                        elif line.lstrip().startswith('"taipy-rest@git+https'):
-                            start = line.find('"taipy-rest')
-                            end = line.rstrip().find(",")
-                            line = f'{line[:start]}"taipy-rest=={sys.argv[2]}"{line[end:]}'
-                        elif line.lstrip().startswith('"taipy-templates@git+https'):
-                            start = line.find('"taipy-templates')
-                            end = line.rstrip().find(",")
-                            line = f'{line[:start]}"taipy-templates=={sys.argv[3]}"{line[end:]}'
-            setup_w.write(line)
-
-
-if __name__ == "__main__":
-    update_setup()

+ 124 - 25
tools/release/update_setup_requirements.py

@@ -8,45 +8,144 @@
 # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
+# --------------------------------------------------------------------------------------------------
+# Updates the setup.requirements.txt file of a given package.
+#
+# Invoked by workflows/build-and-release-single-package.yml and workflows/build-and-release.yml.
+# Working directory must be [root_dir].
+# --------------------------------------------------------------------------------------------------
 
+import argparse
 import os
-import sys
-from typing import Dict
+import re
+import typing as t
+
+from common import Git, Package, Version
 
 BASE_PATH = "./tools/packages"
 
 
-def __build_taipy_package_line(line: str, version: str, publish_on_py_pi: bool) -> str:
-    _line = line.strip()
-    if publish_on_py_pi:
-        return f"{_line}=={version}\n"
-    tag = f"{version}-{_line.split('-')[1]}"
-    tar_name = f"{_line}-{version}"
-    return f"{_line} @ https://github.com/Avaiga/taipy/releases/download/{tag}/{tar_name}.tar.gz\n"
+def __build_taipy_package_line(line: str, version: Version, use_pypi: bool, gh_path: t.Optional[str]) -> str:
+    line = line.strip()
+    if use_pypi:
+        # Target dependency version should be the latest release compatible with 'version'
+        return f"{line} >={version.major}.{version.minor},<{version.major}.{version.minor + 1}\n"
+    tag = f"{version}-{line.split('-')[1]}"
+    tar_name = f"{line}-{version}"
+    return f"{line} @ https://github.com/{gh_path}/releases/download/{tag}/{tar_name}.tar.gz\n"
 
 
-def update_setup_requirements(package: str, versions: Dict, publish_on_py_pi: bool) -> None:
-    _path = os.path.join(BASE_PATH, package, "setup.requirements.txt")
+def update_setup_requirements(
+    package: Package, versions: dict[str, Version], publish_on_py_pi: bool, gh_path: t.Optional[str]
+) -> None:
+    path = os.path.join(BASE_PATH, package.name, "setup.requirements.txt")
     lines = []
-    with open(_path, mode="r") as req:
+    with open(path, mode="r") as req:
         for line in req:
-            if v := versions.get(line.strip()):
-                line = __build_taipy_package_line(line, v, publish_on_py_pi)
+            if match := re.match(r"^taipy(:?\-\w+)?\s*", line, re.MULTILINE):
+                # Add subpackage version if not forced
+                if not line[match.end() :] and (v := versions.get(line.strip())):
+                    if v == Version.UNKNOWN:
+                        raise ValueError(f"Missing version for dependency '{line.strip()}'.")
+                    line = __build_taipy_package_line(line, v, publish_on_py_pi, gh_path)
             lines.append(line)
 
-    with open(_path, "w") as file:
+    with open(path, "w") as file:
         file.writelines(lines)
+    # Print the generated requirements for logging purposes
+    print(f"Generated setup.requirements.txt for package '{package}'")  # noqa: T201
+    for line in lines:
+        print(line.strip())  # noqa: T201
+    print("-" * 32)  # noqa: T201
 
 
-if __name__ == "__main__":
-    _package = sys.argv[1]
-    _versions = {
-        "taipy-common": sys.argv[2],
-        "taipy-core": sys.argv[3],
-        "taipy-gui": sys.argv[4],
-        "taipy-rest": sys.argv[5],
-        "taipy-templates": sys.argv[6],
+def main():
+    parser = argparse.ArgumentParser(
+        description="Computes the Taipy package versions to be build.", formatter_class=argparse.RawTextHelpFormatter
+    )
+
+    # <package> argument
+    parser.add_argument(
+        "package",
+        type=Package,
+        action="store",
+        help="""The name of the package to setup the build version for.
+This must be the short name of a Taipy package (common, core...) or 'taipy'.
+""",
+    )
+
+    # <common-version> argument
+    parser.add_argument(
+        "common_version",
+        type=Version.check_argument,
+        action="store",
+        help="Full name of the target version (M.m.p) for the taipy-common package.",
+    )
+    # <core-version> argument
+    parser.add_argument(
+        "core_version",
+        type=Version.check_argument,
+        action="store",
+        help="Full name of the target version (M.m.p) for the taipy-core package.",
+    )
+    # <gui-version> argument
+    parser.add_argument(
+        "gui_version",
+        type=Version.check_argument,
+        action="store",
+        help="Full name of the target version (M.m.p) for the taipy-gui package.",
+    )
+    # <rest-version> argument
+    parser.add_argument(
+        "rest_version",
+        type=Version.check_argument,
+        action="store",
+        help="Full name of the target version (M.m.p) for the taipy-rest package.",
+    )
+    # <templates-version> argument
+    parser.add_argument(
+        "templates_version",
+        type=Version.check_argument,
+        action="store",
+        help="Full name of the target version (M.m.p) for the taipy-templates package.",
+    )
+    # <dependencies-location> argument
+    parser.add_argument(
+        "-deps",
+        "-dl",
+        "--dependencies-location",
+        type=str.lower,
+        choices=["github", "pypi"],
+        required=True,
+        help="Where to point dependencies to.",
+    )
+
+    # <repository_name> argument
+    def _check_repository_name(value: str) -> str:
+        if len(value.split("/")) != 2:
+            raise argparse.ArgumentTypeError(f"'{value}' is not a valid '<owner>/<repo>' pair.")
+        return value
+
+    parser.add_argument(
+        "-r",
+        "--repository_name",
+        type=_check_repository_name,
+        help="""The '<owner>/<repo>' string that identifies the repository where releases are fetched.
+The default is the current repository.""",
+    )
+
+    args = parser.parse_args()
+    versions = {
+        "taipy-common": args.common_version,
+        "taipy-core": args.core_version,
+        "taipy-gui": args.gui_version,
+        "taipy-rest": args.rest_version,
+        "taipy-templates": args.templates_version,
     }
-    _publish_on_py_pi = True if sys.argv[7] == "true" else False
+    publish_on_py_pi = args.dependencies_location == "pypi"
+    repository_name = args.repository_name if args.repository_name else Git.get_github_path()
+    update_setup_requirements(args.package, versions, publish_on_py_pi, repository_name)
 
-    update_setup_requirements(_package, _versions, _publish_on_py_pi)
+
+if __name__ == "__main__":
+    main()
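
Depending on the --dependencies-location flag, the taipy-* lines in setup.requirements.txt are rewritten in one of two forms. A standalone sketch that mirrors the pinning rules of __build_taipy_package_line; the helper name, the 4.1.0 version and the 'Avaiga/taipy' repository path are placeholders for illustration:

def build_taipy_dependency(pkg: str, major: int, minor: int, patch: int, use_pypi: bool, gh_path: str) -> str:
    if use_pypi:
        # PyPI mode: accept the latest release within the same major.minor series.
        return f"{pkg} >={major}.{minor},<{major}.{minor + 1}"
    # GitHub mode: point at the tarball attached to the matching GitHub release.
    version = f"{major}.{minor}.{patch}"
    tag = f"{version}-{pkg.split('-')[1]}"
    return f"{pkg} @ https://github.com/{gh_path}/releases/download/{tag}/{pkg}-{version}.tar.gz"

print(build_taipy_dependency("taipy-gui", 4, 1, 0, True, "Avaiga/taipy"))
# taipy-gui >=4.1,<4.2
print(build_taipy_dependency("taipy-gui", 4, 1, 0, False, "Avaiga/taipy"))
# taipy-gui @ https://github.com/Avaiga/taipy/releases/download/4.1.0-gui/taipy-gui-4.1.0.tar.gz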