
Merge pull request #508 from Avaiga/feature/merge-config

Feature/merge config
João André, 1 year ago
parent
commit
212ed5efb7
100 changed files with 7,103 additions and 1 deletion
  1. .github/workflows/linter.yml (+1 -1)
  2. src/taipy/_cli/_base_cli/__init__.py (+12 -0)
  3. src/taipy/_cli/_base_cli/_cli.py (+77 -0)
  4. src/taipy/config/.coveragerc (+14 -0)
  5. src/taipy/config/.editorconfig (+22 -0)
  6. src/taipy/config/.flake8 (+23 -0)
  7. src/taipy/config/.gitattributes (+2 -0)
  8. src/taipy/config/.github/ISSUE_TEMPLATE.md (+15 -0)
  9. src/taipy/config/.github/ISSUE_TEMPLATE/bug_report.md (+40 -0)
  10. src/taipy/config/.github/ISSUE_TEMPLATE/feature-improvement.md (+11 -0)
  11. src/taipy/config/.github/ISSUE_TEMPLATE/new-feature.md (+21 -0)
  12. src/taipy/config/.github/workflows/codeql-analysis.yml (+38 -0)
  13. src/taipy/config/.github/workflows/coverage.yml (+34 -0)
  14. src/taipy/config/.github/workflows/generate_pyi.yml (+29 -0)
  15. src/taipy/config/.github/workflows/publish.yml (+90 -0)
  16. src/taipy/config/.github/workflows/release-dev.yml (+105 -0)
  17. src/taipy/config/.github/workflows/release.yml (+69 -0)
  18. src/taipy/config/.github/workflows/setuptools.yml (+29 -0)
  19. src/taipy/config/.github/workflows/tests.yml (+37 -0)
  20. src/taipy/config/.gitignore (+95 -0)
  21. src/taipy/config/.isort.cfg (+9 -0)
  22. src/taipy/config/.license-header (+10 -0)
  23. src/taipy/config/.pre-commit-config.yaml (+46 -0)
  24. src/taipy/config/CODE_OF_CONDUCT.md (+128 -0)
  25. src/taipy/config/CONTRIBUTING.md (+135 -0)
  26. src/taipy/config/INSTALLATION.md (+29 -0)
  27. src/taipy/config/LICENSE (+202 -0)
  28. src/taipy/config/MANIFEST.in (+2 -0)
  29. src/taipy/config/Pipfile (+28 -0)
  30. src/taipy/config/README.md (+68 -0)
  31. src/taipy/config/__init__.py (+74 -0)
  32. src/taipy/config/_config.py (+95 -0)
  33. src/taipy/config/_config_comparator/__init__.py (+10 -0)
  34. src/taipy/config/_config_comparator/_comparator_result.py (+162 -0)
  35. src/taipy/config/_config_comparator/_config_comparator.py (+183 -0)
  36. src/taipy/config/_init.py (+14 -0)
  37. src/taipy/config/_serializer/__init__.py (+10 -0)
  38. src/taipy/config/_serializer/_base_serializer.py (+161 -0)
  39. src/taipy/config/_serializer/_json_serializer.py (+43 -0)
  40. src/taipy/config/_serializer/_toml_serializer.py (+42 -0)
  41. src/taipy/config/checker/__init__.py (+10 -0)
  42. src/taipy/config/checker/_checker.py (+32 -0)
  43. src/taipy/config/checker/_checkers/__init__.py (+10 -0)
  44. src/taipy/config/checker/_checkers/_auth_config_checker.py (+59 -0)
  45. src/taipy/config/checker/_checkers/_config_checker.py (+81 -0)
  46. src/taipy/config/checker/issue.py (+42 -0)
  47. src/taipy/config/checker/issue_collector.py (+60 -0)
  48. src/taipy/config/common/__init__.py (+10 -0)
  49. src/taipy/config/common/_classproperty.py (+18 -0)
  50. src/taipy/config/common/_config_blocker.py (+50 -0)
  51. src/taipy/config/common/_repr_enum.py (+20 -0)
  52. src/taipy/config/common/_template_handler.py (+151 -0)
  53. src/taipy/config/common/_validate_id.py (+27 -0)
  54. src/taipy/config/common/frequency.py (+45 -0)
  55. src/taipy/config/common/scope.py (+49 -0)
  56. src/taipy/config/config.py (+250 -0)
  57. src/taipy/config/config.pyi (+836 -0)
  58. src/taipy/config/contributors.txt (+5 -0)
  59. src/taipy/config/exceptions/__init__.py (+10 -0)
  60. src/taipy/config/exceptions/exceptions.py (+30 -0)
  61. src/taipy/config/global_app/__init__.py (+0 -0)
  62. src/taipy/config/global_app/global_app_config.py (+62 -0)
  63. src/taipy/config/section.py (+72 -0)
  64. src/taipy/config/setup.py (+62 -0)
  65. src/taipy/config/stubs/generate_pyi.py (+165 -0)
  66. src/taipy/config/stubs/pyi_header.py (+25 -0)
  67. src/taipy/config/tests/__init__.py (+10 -0)
  68. src/taipy/config/tests/config/__init__.py (+10 -0)
  69. src/taipy/config/tests/config/checker/__init__.py (+10 -0)
  70. src/taipy/config/tests/config/checker/checkers/__init__.py (+10 -0)
  71. src/taipy/config/tests/config/checker/checkers/test_checker.py (+27 -0)
  72. src/taipy/config/tests/config/checker/checkers/test_config_checker.py (+81 -0)
  73. src/taipy/config/tests/config/checker/test_default_config_checker.py (+22 -0)
  74. src/taipy/config/tests/config/checker/test_issue_collector.py (+94 -0)
  75. src/taipy/config/tests/config/common/__init__.py (+10 -0)
  76. src/taipy/config/tests/config/common/test_argparser.py (+138 -0)
  77. src/taipy/config/tests/config/common/test_classproperty.py (+27 -0)
  78. src/taipy/config/tests/config/common/test_scope.py (+50 -0)
  79. src/taipy/config/tests/config/common/test_template_handler.py (+198 -0)
  80. src/taipy/config/tests/config/common/test_validate_id.py (+47 -0)
  81. src/taipy/config/tests/config/conftest.py (+49 -0)
  82. src/taipy/config/tests/config/global_app/__init__.py (+10 -0)
  83. src/taipy/config/tests/config/global_app/test_global_app_config.py (+46 -0)
  84. src/taipy/config/tests/config/test_compilation.py (+152 -0)
  85. src/taipy/config/tests/config/test_config_comparator.py (+356 -0)
  86. src/taipy/config/tests/config/test_default_config.py (+48 -0)
  87. src/taipy/config/tests/config/test_env_file_config.py (+58 -0)
  88. src/taipy/config/tests/config/test_file_config.py (+42 -0)
  89. src/taipy/config/tests/config/test_override_config.py (+200 -0)
  90. src/taipy/config/tests/config/test_section.py (+47 -0)
  91. src/taipy/config/tests/config/test_section_registration.py (+169 -0)
  92. src/taipy/config/tests/config/test_section_serialization.py (+476 -0)
  93. src/taipy/config/tests/config/utils/__init__.py (+0 -0)
  94. src/taipy/config/tests/config/utils/checker_for_tests.py (+18 -0)
  95. src/taipy/config/tests/config/utils/named_temporary_file.py (+28 -0)
  96. src/taipy/config/tests/config/utils/section_for_tests.py (+69 -0)
  97. src/taipy/config/tests/config/utils/section_of_sections_list_for_tests.py (+98 -0)
  98. src/taipy/config/tests/config/utils/unique_section_for_tests.py (+70 -0)
  99. src/taipy/config/tests/logger/__init__.py (+10 -0)
  100. src/taipy/config/tests/logger/logger.conf (+27 -0)

+ 1 - 1
.github/workflows/linter.yml

@@ -19,7 +19,7 @@ jobs:
       - uses: ricardochaves/python-lint@v1.4.0
         with:
           use-pylint: false
-          use-mypy: false
+          use-isort: false
           extra-black-options: "--line-length=120"
           extra-pycodestyle-options: "--max-line-length=120 --ignore=E121,E123,E126,E226,E24,E704,W503,W504,E203"
           extra-mypy-options: "--ignore-missing-imports --implicit-optional --no-namespace-packages --exclude src/taipy/templates --follow-imports skip"

+ 12 - 0
src/taipy/_cli/_base_cli/__init__.py

@@ -0,0 +1,12 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from ._cli import _CLI

+ 77 - 0
src/taipy/_cli/_base_cli/_cli.py

@@ -0,0 +1,77 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import argparse
+from typing import Dict
+
+
+class _CLI:
+    """Argument parser for Taipy application."""
+
+    # The conflict_handler is set to "resolve" to override conflict arguments
+    _subparser_action = None
+    _parser = argparse.ArgumentParser(conflict_handler="resolve")
+
+    _sub_taipyparsers: Dict[str, argparse.ArgumentParser] = {}
+    _arg_groups: Dict[str, argparse._ArgumentGroup] = {}
+
+    @classmethod
+    def _add_subparser(cls, name: str, **kwargs) -> argparse.ArgumentParser:
+        """Create a new subparser and return a argparse handler."""
+        if subparser := cls._sub_taipyparsers.get(name):
+            return subparser
+
+        if not cls._subparser_action:
+            cls._subparser_action = cls._parser.add_subparsers()
+
+        subparser = cls._subparser_action.add_parser(
+            name=name,
+            conflict_handler="resolve",
+            **kwargs,
+        )
+        cls._sub_taipyparsers[name] = subparser
+        subparser.set_defaults(which=name)
+        return subparser
+
+    @classmethod
+    def _add_groupparser(cls, title: str, description: str = "") -> argparse._ArgumentGroup:
+        """Create a new group for arguments and return a argparser handler."""
+        if groupparser := cls._arg_groups.get(title):
+            return groupparser
+
+        groupparser = cls._parser.add_argument_group(title=title, description=description)
+        cls._arg_groups[title] = groupparser
+        return groupparser
+
+    @classmethod
+    def _parse(cls):
+        """Parse and return only known arguments."""
+        args, _ = cls._parser.parse_known_args()
+        return args
+
+    @classmethod
+    def _remove_argument(cls, arg: str):
+        """Remove an argument from the parser. Note that the `arg` must be without --.
+
+        Source: https://stackoverflow.com/questions/32807319/disable-remove-argument-in-argparse
+        """
+        for action in cls._parser._actions:
+            opts = action.option_strings
+            if (opts and opts[0] == arg) or action.dest == arg:
+                cls._parser._remove_action(action)
+                break
+
+        for argument_group in cls._parser._action_groups:
+            for group_action in argument_group._group_actions:
+                opts = group_action.option_strings
+                if (opts and opts[0] == arg) or group_action.dest == arg:
+                    argument_group._group_actions.remove(group_action)
+                    return

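A minimal usage sketch of the `_CLI` helper added in `_cli.py` above. This example is not part of the diff; the `run` subcommand and the `--port`/`--debug` options are hypothetical and only illustrate the methods shown in the file.

```python
# Hypothetical usage sketch of the _CLI helper introduced above (not part of this PR).
from taipy._cli._base_cli import _CLI

# Register a subcommand; calling this again with the same name returns the same parser.
run_parser = _CLI._add_subparser("run", help="Run a Taipy application.")
run_parser.add_argument("--port", type=int, default=5000)

# Register a named argument group on the top-level parser.
group = _CLI._add_groupparser("Taipy options", "Options shared by every command.")
group.add_argument("--debug", action="store_true")

# Parse only the known arguments; unrecognized ones are ignored.
args = _CLI._parse()
if getattr(args, "which", None) == "run":
    print(f"Running on port {args.port}")
```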
+ 14 - 0
src/taipy/config/.coveragerc

@@ -0,0 +1,14 @@
+[run]
+# uncomment the following to omit files during running
+#omit =
+[report]
+exclude_lines =
+    pragma: no cover
+    def __repr__
+    if self.debug:
+    if settings.DEBUG
+    raise AssertionError
+    raise NotImplementedError
+    if 0:
+    if __name__ == .__main__.:
+    def main

+ 22 - 0
src/taipy/config/.editorconfig

@@ -0,0 +1,22 @@
+# http://editorconfig.org
+
+root = true
+
+[*]
+indent_style = space
+indent_size = 4
+max_line_length = 120
+trim_trailing_whitespace = true
+insert_final_newline = true
+charset = utf-8
+end_of_line = lf
+
+[*.bat]
+indent_style = tab
+end_of_line = crlf
+
+[LICENSE]
+insert_final_newline = false
+
+[Makefile]
+indent_style = tab

+ 23 - 0
src/taipy/config/.flake8

@@ -0,0 +1,23 @@
+[flake8]
+# required by black, https://github.com/psf/black/blob/master/.flake8
+max-line-length = 120
+max-complexity = 18
+ignore = E203, E266, E501, E722, W503, F403, F401
+select = B,C,E,F,W,T4,B9
+docstring-convention = google
+per-file-ignores =
+    __init__.py:F401
+exclude =
+    .git,
+    __pycache__,
+    setup.py,
+    build,
+    dist,
+    releases,
+    .venv,
+    .tox,
+    .mypy_cache,
+    .pytest_cache,
+    .vscode,
+    .github,
+    tests

+ 2 - 0
src/taipy/config/.gitattributes

@@ -0,0 +1,2 @@
+# Set the default behavior, in case people don't have core.autocrlf set.
+* text=auto

+ 15 - 0
src/taipy/config/.github/ISSUE_TEMPLATE.md

@@ -0,0 +1,15 @@
+* Taipy version:
+* Python version:
+* Operating System:
+
+### Description
+
+Describe what you were trying to get done.
+Tell us what happened, what went wrong, and what you expected to happen.
+
+### What I Did
+
+```
+Paste the command(s) you ran and the output.
+If there was a crash, please include the traceback here.
+```

+ 40 - 0
src/taipy/config/.github/ISSUE_TEMPLATE/bug_report.md

@@ -0,0 +1,40 @@
+---
+name: Bug report
+about: Bug reports help improve the product quality.
+title: BUG-
+labels: bug
+assignees: ''
+
+---
+
+**Description**
+A complete and clear description of the problem.
+
+**How to reproduce**
+
+- Configuration files or code:
+    ```
+    from taipy import Config
+
+
+    Config.configure_data_node(...)
+    ...
+    ```
+- A code fragment
+    ```
+    import taipy as tp
+    tp.create_scenario(...)
+    ...
+    ```
+
+**Expected behavior**
+Description of what would be the expected outcome.
+
+**Screenshots**
+When available and relevant, screenshots better help show the problem.
+
+**Runtime environment**
+Please specify relevant indications.
+ - Taipy version:
+ - OS: [e.g. Linux, Windows] and version
+ - Any other relevant information.

+ 11 - 0
src/taipy/config/.github/ISSUE_TEMPLATE/feature-improvement.md

@@ -0,0 +1,11 @@
+---
+name: Feature improvement
+about: Feature improvements add extra functionality to an existing feature.
+title: ''
+labels: enhancement
+assignees: ''
+
+---
+
+**Description**
+What this improvement addresses (performance, API...).

+ 21 - 0
src/taipy/config/.github/ISSUE_TEMPLATE/new-feature.md

@@ -0,0 +1,21 @@
+---
+name: New feature
+about: Suggest a new feature for the product
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+**What would that feature address**
+Description of the missing functionality that this issue would address.
+Ex: It is not possible to do this or that...
+
+**Description of the ideal solution**
+What would be the best way to provide that functionality?
+
+**Caveats**
+What impact could that feature have on the rest of the product, and what would need special care?
+
+**Other options**
+What else could we do (workaround, third-party...)?

+ 38 - 0
src/taipy/config/.github/workflows/codeql-analysis.yml

@@ -0,0 +1,38 @@
+name: "CodeQL"
+
+on:
+  push:
+    branches: [ develop, dev/*, release/* ]
+  pull_request:
+    branches: [ develop, dev/*, release/* ]
+  schedule:
+    - cron: '22 15 * * 2'
+
+jobs:
+  analyze:
+    name: Analyze
+    runs-on: ubuntu-latest
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+
+    strategy:
+      fail-fast: false
+      matrix:
+        language: [ 'python' ]
+
+    steps:
+    - name: Checkout repository
+      uses: actions/checkout@v3
+
+    - name: Initialize CodeQL
+      uses: github/codeql-action/init@v2
+      with:
+        languages: ${{ matrix.language }}
+
+    - name: Autobuild
+      uses: github/codeql-action/autobuild@v2
+
+    - name: Perform CodeQL Analysis
+      uses: github/codeql-action/analyze@v2

+ 34 - 0
src/taipy/config/.github/workflows/coverage.yml

@@ -0,0 +1,34 @@
+name: Coverage
+
+on:
+  pull_request:
+    branches: [ develop, dev/*, release/* ]
+
+jobs:
+  backend-coverage:
+    timeout-minutes: 20
+    strategy:
+      matrix:
+        python-versions: [3.9]
+        os: [ubuntu-latest]
+    runs-on: ${{ matrix.os }}
+
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-versions }}
+
+      - name: Generate coverage report
+        env:
+          TOX_PARALLEL_NO_SPINNER: 1
+        run: |
+          pip install tox
+          tox -e coverage
+
+      - name: Code coverage
+        uses: orgoro/coverage@v2
+        with:
+          coverageFile: coverage.xml
+          token: ${{ secrets.GITHUB_TOKEN }}
+          thresholdAll: 0.85

+ 29 - 0
src/taipy/config/.github/workflows/generate_pyi.yml

@@ -0,0 +1,29 @@
+name: Generate config.pyi
+
+on:
+  pull_request:
+    branches: [ develop, dev/*, release/* ]
+
+jobs:
+  generate-pyi:
+    timeout-minutes: 20
+    strategy:
+      matrix:
+        python-versions: ['3.10']
+        os: [ubuntu-latest]
+    runs-on: ${{ matrix.os }}
+    permissions:
+      contents: write
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-versions }}
+      - name: Clone taipy-core
+        run: git clone --single-branch --branch ${{ github.base_ref }} https://github.com/Avaiga/taipy-core.git
+      - name: Generate config.pyi
+        run: python stubs/generate_pyi.py
+      - name: Remove taipy-core folder
+        run: rm -fr taipy-core/
+      - uses: stefanzweifel/git-auto-commit-action@v4

+ 90 - 0
src/taipy/config/.github/workflows/publish.yml

@@ -0,0 +1,90 @@
+name: Publish on Pypi
+
+on:
+  workflow_dispatch:
+    inputs:
+      version:
+        description: "The tag of the package to publish on Pypi (ex: 1.0.0, 1.0.0.dev0)"
+        required: true
+
+jobs:
+  test-package:
+    timeout-minutes: 20
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+
+      - name: Extract Github Tag Version
+        id: vars
+        run: echo "tag=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
+
+      - name: Ensure package version is properly set
+        run: |
+          echo """
+          import json, sys, os
+          with open(f\"src{os.sep}taipy{os.sep}config{os.sep}version.json\") as version_file:
+            version_o = json.load(version_file)
+          version = f'{version_o.get(\"major\")}.{version_o.get(\"minor\")}.{version_o.get(\"patch\")}'
+          if vext := version_o.get(\"ext\"):
+            version = f'{version}.{vext}'
+          if version != sys.argv[1]:
+            raise ValueError(f\"Invalid version {version} / {sys.argv[1]}\")
+          if sys.argv[1] != sys.argv[2]:
+            raise ValueError(f\"Invalid tag version {sys.argv[2]} with package version {sys.argv[1]}\")
+          """ > /tmp/check.py
+          python /tmp/check.py "${{ github.event.inputs.version }}" "${{ steps.vars.outputs.tag }}"
+
+      - name: Download assets from github release tag
+        run: |
+          gh release download ${{ github.event.inputs.version }} --dir dist
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Verify there is a release asset
+        run: |
+          if [ ! -f dist/${{ github.event.repository.name }}-${{ github.event.inputs.version }}.tar.gz ]; then
+            echo "No release asset found"
+            exit 1
+          fi
+
+  publish-to-pypi:
+    needs: [test-package]
+    timeout-minutes: 20
+    environment: publish
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Download assets from tag
+        run: |
+          gh release download ${{ github.event.inputs.version }} --dir dist
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          user: __token__
+          password: ${{ secrets.PYPI_API_TOKEN }}
+
+  test-published-package:
+    needs: [publish-to-pypi]
+    timeout-minutes: 30
+    strategy:
+      matrix:
+        python-versions: ['3.8','3.9','3.10']
+        os: [ubuntu-latest,windows-latest,macos-latest]
+    runs-on: ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-versions }}
+
+      - name: Install and test package
+        run: |
+          pip install --upgrade pip
+          pip install --no-cache-dir ${{ github.event.repository.name }}==${{ github.event.inputs.version }}

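For readability, the version check that the publish workflow above writes to `/tmp/check.py` can be sketched as a standalone script. This is illustrative only; it mirrors the inline heredoc in the workflow and adds nothing to the diff itself.

```python
# Standalone sketch of the version check embedded in the publish workflow above.
# It compares src/taipy/config/version.json against the requested version and the git tag.
import json
import os
import sys


def read_version(path: str) -> str:
    """Build the 'major.minor.patch[.ext]' string from version.json."""
    with open(path) as version_file:
        v = json.load(version_file)
    version = f"{v.get('major')}.{v.get('minor')}.{v.get('patch')}"
    if ext := v.get("ext"):
        version = f"{version}.{ext}"
    return version


if __name__ == "__main__":
    requested, tag = sys.argv[1], sys.argv[2]  # e.g. "1.0.0.dev0" and the pushed tag
    version = read_version(os.path.join("src", "taipy", "config", "version.json"))
    if version != requested:
        raise ValueError(f"Invalid version {version} / {requested}")
    if requested != tag:
        raise ValueError(f"Invalid tag version {tag} with package version {requested}")
```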
+ 105 - 0
src/taipy/config/.github/workflows/release-dev.yml

@@ -0,0 +1,105 @@
+name: Create Github Dev Release
+
+on:
+  workflow_dispatch:
+
+jobs:
+  release-dev-package:
+    timeout-minutes: 20
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ssh-key: ${{secrets.DEPLOY_KEY}}
+
+      - uses: actions/setup-python@v4
+        with:
+          python-version: 3.8
+
+      - name: Ensure package version has 'dev' suffix
+        run: |
+          echo """
+          import json, sys, os
+          SUFFIX = 'dev'
+          with open(f\"src{os.sep}taipy{os.sep}config{os.sep}version.json\") as version_file:
+              version_o = json.load(version_file)
+          version = f'{version_o.get(\"major\")}.{version_o.get(\"minor\")}.{version_o.get(\"patch\")}'
+          if vext := version_o.get(\"ext\"):
+              version = f'{version}.{vext}'
+          if SUFFIX not in version:
+              raise ValueError(f\"version {version} does not contain suffix {SUFFIX}\")
+          """ > /tmp/check1.py
+          python /tmp/check1.py
+
+      - name: Grab the version of the package
+        id: current-version
+        run: |
+          echo """
+          import json, os
+          with open(f\"src{os.sep}taipy{os.sep}config{os.sep}version.json\") as version_file:
+              version_o = json.load(version_file)
+          version = f'{version_o.get(\"major\")}.{version_o.get(\"minor\")}.{version_o.get(\"patch\")}'
+          if vext := version_o.get(\"ext\"):
+              version = f'{version}.{vext}'
+          print(f'VERSION={version}')
+          """ > /tmp/check2.py
+          python /tmp/check2.py >> $GITHUB_OUTPUT
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install build
+
+      - name: Build package
+        run: python setup.py build_py && python -m build
+
+      - name: Install the package and test it
+        run: |
+          # Install package
+          echo "Installing package..."
+          pip install ./dist/${{ github.event.repository.name }}-${{ steps.current-version.outputs.VERSION }}.tar.gz
+
+      - name: Extract commit hash
+        shell: bash
+        run: echo "##[set-output name=HASH;]$(echo $(git rev-parse HEAD))"
+        id: extract_hash
+
+      - name: Create/update release and tag
+        run: |
+          echo "Creating release ${{ steps.current-version.outputs.VERSION }}"
+          gh release create ${{ steps.current-version.outputs.VERSION }} ./dist/${{ github.event.repository.name }}-${{ steps.current-version.outputs.VERSION }}.tar.gz --target ${{ steps.extract_hash.outputs.HASH }} --prerelease --title ${{ steps.current-version.outputs.VERSION }} --notes "Dev Release ${{ steps.current-version.outputs.VERSION }}"
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Reset changes
+        run: |
+          git reset --hard HEAD
+          git clean -fdx
+
+      - name: Increase dev version
+        id: new-version
+        run: |
+          echo """
+          import json, os
+          with open(f'src{os.sep}taipy{os.sep}config{os.sep}version.json') as version_file:
+              version_o = json.load(version_file)
+              if version_o is None or 'dev' not in version_o['ext']:
+                  raise ValueError('Invalid version file. Version must contain dev suffix.')
+              prev_version = version_o['ext']
+              new_version = 'dev' + str(int(version_o['ext'].replace('dev', '')) + 1)
+              with open(f'src{os.sep}taipy{os.sep}config{os.sep}version.json') as r:
+                  text = r.read().replace(prev_version, new_version)
+              with open(f'src{os.sep}taipy{os.sep}config{os.sep}version.json', mode='w') as w:
+                  w.write(text)
+              with open(f\"src{os.sep}taipy{os.sep}config{os.sep}version.json\") as version_file:
+                  version_o = json.load(version_file)
+              version = f'{version_o.get(\"major\")}.{version_o.get(\"minor\")}.{version_o.get(\"patch\")}'
+              if vext := version_o.get(\"ext\"):
+                  version = f'{version}.{vext}'
+              print(f'VERSION={version}')
+          """ > /tmp/increase_dev_version.py
+          python /tmp/increase_dev_version.py >> $GITHUB_OUTPUT
+
+      - uses: stefanzweifel/git-auto-commit-action@v4
+        with:
+          commit_message: Update version to ${{ steps.new-version.outputs.VERSION }}

+ 69 - 0
src/taipy/config/.github/workflows/release.yml

@@ -0,0 +1,69 @@
+name: Create Github Release
+
+on:
+  workflow_dispatch:
+    inputs:
+      version:
+        description: "The release/package version to create (ex: 1.0.0)"
+        required: true
+
+jobs:
+  release-package:
+    timeout-minutes: 20
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+
+      - name: Extract branch name
+        shell: bash
+        run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
+        id: extract_branch
+
+      - name: Ensure package version is properly set
+        run: |
+          echo """
+          import json, sys, os
+          with open(f\"src{os.sep}taipy{os.sep}config{os.sep}version.json\") as version_file:
+              version_o = json.load(version_file)
+          version = f'{version_o.get(\"major\")}.{version_o.get(\"minor\")}.{version_o.get(\"patch\")}'
+          if vext := version_o.get(\"ext\"):
+              version = f'{version}.{vext}'
+          if version != sys.argv[1]:
+              raise ValueError(f\"Invalid version {version} / {sys.argv[1]}\")
+          """ > /tmp/check1.py
+          python /tmp/check1.py "${{ github.event.inputs.version }}"
+
+      - name: Validate branch name
+        run: |
+          echo """
+          import json, sys, os
+          with open(f\"src{os.sep}taipy{os.sep}config{os.sep}version.json\") as version_file:
+              version = json.load(version_file)
+          if f'release/{version.get(\"major\")}.{version.get(\"minor\")}' != sys.argv[1]:
+              raise ValueError(f'Branch name mismatch: release/{version.get(\"major\")}.{version.get(\"minor\")} != {sys.argv[1]}')
+          """ > /tmp/check2.py
+          python /tmp/check2.py "${{ steps.extract_branch.outputs.branch }}"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install build
+
+      - name: Build and test the package
+        run: |
+          python setup.py build_py && python -m build
+          pip install dist/*.tar.gz
+
+      - name: Extract commit hash
+        shell: bash
+        run: echo "##[set-output name=hash;]$(echo $(git rev-parse HEAD))"
+        id: extract_hash
+
+      - name: Create/update release and tag
+        run: |
+            gh release create ${{ github.event.inputs.version }} ./dist/${{ github.event.repository.name }}-${{ github.event.inputs.version }}.tar.gz --target ${{ steps.extract_hash.outputs.hash }} --title ${{ github.event.inputs.version }}
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

+ 29 - 0
src/taipy/config/.github/workflows/setuptools.yml

@@ -0,0 +1,29 @@
+name: Setuptools
+
+on:
+  push:
+    branches: [ develop ]
+  pull_request:
+    branches: [ develop ]
+  schedule:
+    - cron: "0 8 * * *"
+
+jobs:
+  standard-packages:
+    timeout-minutes: 10
+    strategy:
+      matrix:
+        python-versions: [ '3.8', '3.9', '3.10', '3.11' ]
+        os: [ ubuntu-latest, windows-latest, macos-latest ]
+
+    runs-on: ${{ matrix.os }}
+
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-versions }}
+
+      - name: Install Taipy
+        run: |
+          pip install .

+ 37 - 0
src/taipy/config/.github/workflows/tests.yml

@@ -0,0 +1,37 @@
+name: Python tests
+
+on:
+  push:
+    branches: [ develop, dev/*, release/* ]
+  pull_request:
+    branches: [ develop, dev/*, release/* ]
+
+jobs:
+  backend:
+    timeout-minutes: 20
+    strategy:
+      matrix:
+        python-versions: ['3.8', '3.9', '3.10', '3.11']
+        os: [ubuntu-latest, windows-latest, macos-latest]
+    runs-on: ${{ matrix.os }}
+
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-versions }}
+
+      - name: Install Tox
+        run: pip install tox
+
+      - name: Run linter and all tests except Pyodbc's
+        if: matrix.python-versions == '3.10' && matrix.os == 'windows-latest'
+        env:
+          TOX_PARALLEL_NO_SPINNER: 1
+        run: tox -p all
+
+      - name: Run all tests
+        if: matrix.python-versions != '3.10' || matrix.os != 'windows-latest'
+        env:
+          TOX_PARALLEL_NO_SPINNER: 1
+        run: tox -e all-tests

+ 95 - 0
src/taipy/config/.gitignore

@@ -0,0 +1,95 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+Pipfile.lock
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# pyenv
+.python-version
+
+# dotenv
+*.env
+
+# virtualenv
+.venv
+venv/
+ENV/
+
+# mypy
+.mypy_cache/
+
+# IDE settings
+.vscode/
+.idea/
+.idea/taipy.iml
+.DS_Store
+
+# Demo Testing File
+object_selection.py
+dataset
+
+# Filesystem default local storage
+.data/
+
+# python notebook
+*.ipynb
+
+.airflow
+*.dags
+data_sources
+sequences
+tasks
+pickles
+
+*.sqlite3

+ 9 - 0
src/taipy/config/.isort.cfg

@@ -0,0 +1,9 @@
+[settings]
+multi_line_output = 3
+include_trailing_comma = True
+force_grid_wrap = 0
+use_parentheses = True
+ensure_newline_before_comments = True
+line_length = 120
+# you can skip files as below
+#skip_glob = docs/conf.py

+ 10 - 0
src/taipy/config/.license-header

@@ -0,0 +1,10 @@
+Copyright 2023 Avaiga Private Limited
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.

+ 46 - 0
src/taipy/config/.pre-commit-config.yaml

@@ -0,0 +1,46 @@
+repos:
+-   repo: https://github.com/pre-commit/mirrors-mypy
+    rev: 'v0.910'  # Use the sha / tag you want to point at
+    hooks:
+    -   id: mypy
+        additional_dependencies: [
+                'types-Markdown',
+                'types-python-dateutil',
+                'types-pytz',
+                'types-tzlocal',
+        ]
+-   repo: https://github.com/Lucas-C/pre-commit-hooks
+    rev: v1.1.13
+    hooks:
+    -   id: forbid-crlf
+    -   id: remove-crlf
+    -   id: forbid-tabs
+    -   id: remove-tabs
+    -   id: insert-license
+        files: \.py$
+        args:
+        - --license-filepath
+        - .license-header
+-   repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.0.1
+    hooks:
+    - id: trailing-whitespace
+    - id: end-of-file-fixer
+    - id: check-merge-conflict
+    - id: check-yaml
+      args: [--unsafe]
+-   repo: https://github.com/pre-commit/mirrors-isort
+    rev: v5.9.3
+    hooks:
+    - id: isort
+-   repo: https://github.com/ambv/black
+    rev: 22.3.0
+    hooks:
+    - id: black
+      args: [--line-length=120]
+      language_version: python3
+-   repo: https://gitlab.com/pycqa/flake8
+    rev: 3.9.2
+    hooks:
+    -   id: flake8
+        additional_dependencies: [flake8-typing-imports==1.10.0]

+ 128 - 0
src/taipy/config/CODE_OF_CONDUCT.md

@@ -0,0 +1,128 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our
+community a harassment-free experience for everyone, regardless of age, body
+size, visible or invisible disability, ethnicity, sex characteristics, gender
+identity and expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, religion, or sexual identity
+and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming,
+diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our
+community include:
+
+* Demonstrating empathy and kindness toward other people.
+* Being respectful of differing opinions, viewpoints, and experiences.
+* Giving and gracefully accepting constructive feedback.
+* Accepting responsibility and apologizing to those affected by our mistakes,
+  and learning from the experience.
+* Focusing on what is best not just for us as individuals, but for the
+  overall community.
+
+Examples of unacceptable behavior include:
+
+* The use of sexualized language or imagery, and sexual attention or
+  advances of any kind.
+* Trolling, insulting or derogatory comments, and personal or political attacks
+* Public or private harassment.
+* Publishing others' private information, such as a physical or email
+  address, without their explicit permission.
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting.
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of
+acceptable behavior and will take appropriate and fair corrective action in
+response to any behavior that they deem inappropriate, threatening, offensive,
+or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, and will communicate reasons for moderation
+decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces, and also applies when
+an individual is officially representing the community in public spaces.
+Examples of representing our community include using an official e-mail address,
+posting via an official social media account, or acting as an appointed
+representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported to the community leaders responsible for enforcement at
+rnd@avaiga.com.
+All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the
+reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining
+the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed
+unprofessional or unwelcome in the community.
+
+**Consequence**: A private, written warning from community leaders, providing
+clarity around the nature of the violation and an explanation of why the
+behavior was inappropriate. A public apology may be requested.
+
+### 2. Warning
+
+**Community Impact**: A violation through a single incident or series
+of actions.
+
+**Consequence**: A warning with consequences for continued behavior. No
+interaction with the people involved, including unsolicited interaction with
+those enforcing the Code of Conduct, for a specified period of time. This
+includes avoiding interactions in community spaces as well as external channels
+like social media. Violating these terms may lead to a temporary or
+permanent ban.
+
+### 3. Temporary Ban
+
+**Community Impact**: A serious violation of community standards, including
+sustained inappropriate behavior.
+
+**Consequence**: A temporary ban from any sort of interaction or public
+communication with the community for a specified period of time. No public or
+private interaction with the people involved, including unsolicited interaction
+with those enforcing the Code of Conduct, is allowed during this period.
+Violating these terms may lead to a permanent ban.
+
+### 4. Permanent Ban
+
+**Community Impact**: Demonstrating a pattern of violation of community
+standards, including sustained inappropriate behavior,  harassment of an
+individual, or aggression toward or disparagement of classes of individuals.
+
+**Consequence**: A permanent ban from any sort of public interaction within
+the community.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 2.0, available at
+https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
+
+Community Impact Guidelines were inspired by [Mozilla's code of conduct
+enforcement ladder](https://github.com/mozilla/diversity).
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see the FAQ at
+https://www.contributor-covenant.org/faq. Translations are available at
+https://www.contributor-covenant.org/translations.

+ 135 - 0
src/taipy/config/CONTRIBUTING.md

@@ -0,0 +1,135 @@
+# Contributions
+
+Thanks for your interest in helping improve Taipy! Contributions are welcome, and they are greatly appreciated!
+Every little bit helps, and credit will always be given.
+
+There are multiple ways to contribute to Taipy: writing code, but also reporting bugs, creating feature requests,
+helping other users in our forums, on [stack**overflow**](https://stackoverflow.com/), etc.
+
+Today, the only way to communicate with the Taipy team is through GitHub issues.
+
+## Never contributed to an open source project before?
+
+Have a look at this [GitHub documentation](https://docs.github.com/en/get-started/quickstart/contributing-to-projects).
+
+## Report bugs
+
+Bugs are reported through [GitHub issues](https://github.com/Avaiga/taipy/issues).
+
+Please include relevant information and, preferably, code that exhibits the problem. We provide templates to help you
+describe the issue.
+
+The Taipy team will analyse the report and try to reproduce the bug to provide feedback. If confirmed, we will add a
+priority to the issue and add it to our backlog. Feel free to propose a pull request to fix it.
+
+## Issue reporting, feedback, proposal, design or any other comment
+
+Any feedback or proposal is greatly appreciated! Do not hesitate to create an issue with the appropriate template on
+[GitHub](https://github.com/Avaiga/taipy/issues).
+
+The Taipy team will analyse your issue and get back to you as soon as possible.
+
+## Improve Documentation
+
+Do not hesitate to create an issue or pull request directly on the
+[taipy-doc repository](https://github.com/Avaiga/taipy-doc).
+
+## Implement Features
+
+The Taipy team manages its backlog in private. Each issue that will be handled during the current sprint is
+labeled `current sprint`. Please do not work on those issues; the Taipy team is already on them.
+
+## Code organisation
+
+Taipy is organised in five main repositories:
+
+- [taipy-config](https://github.com/Avaiga/taipy-config).
+- [taipy-core](https://github.com/Avaiga/taipy-core).
+- [taipy-gui](https://github.com/Avaiga/taipy-gui).
+- [taipy-rest](https://github.com/Avaiga/taipy-rest).
+- [taipy](https://github.com/Avaiga/taipy) bundles the previous packages into a single one.
+
+## Coding style and best practices
+
+### Python
+
+Taipy's repositories follow the [PEP 8](https://www.python.org/dev/peps/pep-0008/) and
+[PEP 484](https://www.python.org/dev/peps/pep-0484/) coding convention.
+
+### TypeScript
+
+Taipy's repositories use the [ESLint](https://eslint.org/) and
+[TypeScript ESLint](https://github.com/typescript-eslint/typescript-eslint) plugin to ensure a common set of rules.
+
+### Git branches
+
+All new development happens in the `develop` branch. All pull requests should target that branch.
+We are following a strict branch naming convention based on the pattern: `<type>/#<issueId>[IssueSummary]`.
+
+Where:
+
+- `<type>` would be one of:
+    - feature: new feature implementation, or improvement of a feature.
+    - bug: bug fix.
+    - review: change provoked by review comment not immediately taken care of.
+    - refactor: refactor of a piece of code.
+    - doc: doc changes (complement or typo fixes…).
+    - build: in relation with the build process.
+- `<issueId>` is the processed issue identifier. The advantage of explicitly indicating the issue number is that in
+  GitHub, a pull request page shows a direct link to the issue description.
+- `[IssueSummary]` is a short summary of the issue topic, not including spaces, using Camel case or lower-case,
+  dash-separated words. This summary, with its dash (‘-’) symbol prefix, is optional.
+
+
+## Contribution workflow
+
+Find an issue without the label `current sprint` and add a comment on it to inform the community that you are
+working on it.
+
+1. Make your [own fork](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) of the repository
+   targeted by the issue. Clone it on your local machine, then go inside the directory.
+
+2. We are working with [Pipenv](https://github.com/pypa/pipenv) for our virtualenv.
+   Create a local env and install the development packages by running `pipenv install --dev`, then run the tests with
+   `pipenv run pytest` to verify your setup.
+
+3. For convention help, we provide a [pre-commit](https://pre-commit.com/hooks.html) file.
+   This tool will run before each commit and will automatically reformat code or raise warnings and errors based on the
+   code format or Python typing.
+   You can install and set it up by running:
+   ```
+     pipenv install pre-commit
+     pipenv run python -m pre-commit install
+   ```
+
+4. Make the changes.<br/>
+   You may want to also add your GitHub login as a new line of the `contributors.txt` file located at the root
+   of this repository. We are using it to list our contributors in the Taipy documentation
+   (see the "Contributing > Contributors" section) and thank them.
+
+5. Create a [pull request from your fork](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request-from-a-fork).<br/>
+   Keep your pull request as __draft__ until your work is finished.
+   Do not hesitate to add a comment for help or questions.
+   Before you submit a pull request for review from your forked repo, check that it meets these guidelines:
+    - Include tests.
+    - Code is [rebased](http://stackoverflow.com/a/7244456/1110993).
+    - License is present.
+    - pre-commit runs without mypy errors.
+    - GitHub's actions are passing.
+
+6. The Taipy team will have a look at your Pull Request and give feedback. If every requirement is met, your
+   work will be added to the next release. Congratulations!
+
+
+## Dependency management
+
+Taipy comes with multiple optional packages. You can find the list directly in the product or in Taipy's packages.
+The back-end Pipfile does not install optional packages by default because `pyodbc` requires a manual driver
+installation. The front-end behaves differently: its Pipfile installs all optional packages.
+
+If you are contributing to Taipy, be careful with dependencies; do not forget to install or uninstall them depending
+on your issue.
+
+If you need to add a new dependency to Taipy, do not forget to add it to both the `Pipfile` and `setup.py`.
+Keep in mind that every dependency is a potential attack vector. The Taipy team keeps the usage of external
+dependencies to a minimum.

+ 29 - 0
src/taipy/config/INSTALLATION.md

@@ -0,0 +1,29 @@
+# Installation
+
+The latest stable version of _taipy-config_ is available through _pip_:
+```
+pip install taipy-config
+```
+
+## Development version
+
+You can install the development version of _taipy-config_ with _pip_ and _git_:
+```
+pip install git+https://git@github.com/Avaiga/taipy-config
+```
+
+## Work with the _taipy-config_ code
+```
+git clone https://github.com/Avaiga/taipy-config.git
+cd taipy-config
+pip install .
+```
+
+If you want to run tests, please install `Pipenv`:
+```
+pip install pipenv
+git clone https://github.com/Avaiga/taipy-config.git
+cd taipy-config
+pipenv install --dev
+pipenv run pytest
+```

+ 202 - 0
src/taipy/config/LICENSE

@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 Avaiga Private Limited
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

+ 2 - 0
src/taipy/config/MANIFEST.in

@@ -0,0 +1,2 @@
+include src/taipy/config/*.pyi
+include src/taipy/config/*.json

+ 28 - 0
src/taipy/config/Pipfile

@@ -0,0 +1,28 @@
+[[source]]
+url = "https://pypi.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+toml = "==0.10"
+deepdiff = "==6.2.2"
+
+[dev-packages]
+black = "*"
+flake8 = "*"
+flake8-docstrings = "*"
+isort = "*"
+mypy = "*"
+pre-commit = "*"
+pytest = "*"
+pytest-cov = "*"
+pytest-mock = ">=3.6"
+tox = ">=3.24"
+types-toml = ">=0.10.0"
+autopep8 = "*"
+
+[requires]
+python_version = "3"
+
+[pipenv]
+allow_prereleases = true

+ 68 - 0
src/taipy/config/README.md

@@ -0,0 +1,68 @@
+# 🚧 Under construction 🚧
+
+WARNING: The Taipy team is restructuring its repositories. This repository, taipy-config, is about to be
+merged into the main repository: taipy. Once the merge is done, the current code base will live in the
+[taipy repository](https://github.com/Avaiga/taipy). The migration should take at most a few days.
+<br>
+
+
+# Taipy config
+
+## License
+Copyright 2023 Avaiga Private Limited
+
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+[http://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0.txt)
+
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+specific language governing permissions and limitations under the License.
+
+## Usage
+- [License](#license)
+- [Usage](#usage)
+- [Taipy config](#what-is-taipy-config)
+- [Installation](#installation)
+- [Contributing](#contributing)
+- [Code of conduct](#code-of-conduct)
+- [Directory Structure](#directory-structure)
+
+## What is Taipy config
+
+Taipy is a Python library for creating Business Applications. More information is available on our
+[website](https://www.taipy.io). Taipy is split into multiple repositories, including _taipy-config_, to let users
+install only what they need.
+
+[Taipy config](https://github.com/Avaiga/taipy-config) is dedicated to helping the user configure a Taipy application.
+
+More in-depth documentation of Taipy can be found [here](https://docs.taipy.io).
+
+## Installation
+
+Want to install _Taipy config_? Check out our [`INSTALLATION.md`](INSTALLATION.md) file.
+
+## Contributing
+
+Want to help build _Taipy config_? Check out our [`CONTRIBUTING.md`](CONTRIBUTING.md) file.
+
+## Code of conduct
+
+Want to be part of the _Taipy config_ community? Check out our [`CODE_OF_CONDUCT.md`](CODE_OF_CONDUCT.md) file.
+
+## Directory Structure
+
+- `taipy/`:
+    - `config`: Configuration definition, management, and implementation. `config.config.Config` is the main
+      entrypoint for configuring a Taipy Core application.
+    - `logger`: Taipy logger.
+    - `tests`: Unit tests following the `taipy/` structure.
+- `CODE_OF_CONDUCT.md`: Code of conduct for members and contributors of _taipy-config_.
+- `CONTRIBUTING.md`: Instructions to contribute to _taipy-config_.
+- `INSTALLATION.md`: Instructions to install _taipy-config_.
+- `LICENSE`: The Apache 2.0 License.
+- `Pipfile`: File used by the Pipenv virtual environment to manage project dependencies.
+- `README.md`: Current file.
+- `contributors.txt`: The list of contributors.
+- `setup.py`: The setup script managing building, distributing, and installing _taipy-config_.
+- `tox.ini`: Contains test scenarios to be run.
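
As a quick orientation to the modules added below, here is a minimal usage sketch of the `Config` entrypoint mentioned in the directory structure. It assumes that `Config.check()` is exposed (as suggested by the checker package in this change set) and returns an `IssueCollector`; treat it as an illustration rather than the definitive API.

```python
# Minimal sketch - assumes Config.check() exists and returns an IssueCollector.
from taipy.config import Config

collector = Config.check()        # run the registered configuration checkers
for issue in collector.errors:    # IssueCollector exposes errors/warnings/infos
    print(issue)
```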

+ 74 - 0
src/taipy/config/__init__.py

@@ -0,0 +1,74 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+"""# Taipy Config
+
+The Taipy Config package is a Python library designed to configure a Taipy application.
+
+The main entrypoint is the `Config^` singleton class. It exposes some methods to configure the
+Taipy application and some attributes to retrieve the configuration values.
+
+"""
+
+from typing import List
+
+from ._init import Config
+from .checker.issue import Issue
+from .checker.issue_collector import IssueCollector
+from .global_app.global_app_config import GlobalAppConfig
+from .section import Section
+from .unique_section import UniqueSection
+from .version import _get_version
+
+__version__ = _get_version()
+
+
+def _config_doc(func):
+    def func_with_doc(section, attribute_name, default, configuration_methods, add_to_unconflicted_sections=False):
+        import os
+
+        if os.environ.get("GENERATING_TAIPY_DOC", None) and os.environ["GENERATING_TAIPY_DOC"] == "true":
+            with open("config_doc.txt", "a") as f:
+                from inspect import signature
+
+                for exposed_configuration_method, configuration_method in configuration_methods:
+                    annotation = "    @staticmethod\n"
+                    sign = "    def " + exposed_configuration_method + str(signature(configuration_method)) + ":\n"
+                    doc = '        """' + configuration_method.__doc__ + '"""\n'
+                    content = "        pass\n\n"
+                    f.write(annotation + sign + doc + content)
+        return func(section, attribute_name, default, configuration_methods, add_to_unconflicted_sections)
+
+    return func_with_doc
+
+
+@_config_doc
+def _inject_section(
+    section_clazz,
+    attribute_name: str,
+    default: Section,
+    configuration_methods: List[tuple],
+    add_to_unconflicted_sections: bool = False,
+):
+    Config._register_default(default)
+
+    if issubclass(section_clazz, UniqueSection):
+        setattr(Config, attribute_name, Config.unique_sections[section_clazz.name])
+    elif issubclass(section_clazz, Section):
+        setattr(Config, attribute_name, Config.sections[section_clazz.name])
+    else:
+        raise TypeError
+
+    if add_to_unconflicted_sections:
+        Config._comparator._add_unconflicted_section(section_clazz.name)  # type: ignore
+
+    for exposed_configuration_method, configuration_method in configuration_methods:
+        setattr(Config, exposed_configuration_method, configuration_method)
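
The `_inject_section` helper above works by attaching both the section instance and its exposed configuration methods to `Config` with `setattr`. The snippet below is a stripped-down, self-contained illustration of that registration mechanism; the `Registry` class and the `configure_threshold` function are made up for the example.

```python
# Stripped-down illustration of the setattr-based registration that
# _inject_section performs on Config (Registry/configure_threshold are hypothetical).
class Registry:
    sections = {}


def configure_threshold(value: int) -> int:
    Registry.sections["threshold"] = value
    return value


# _inject_section does the same thing: setattr(Config, exposed_name, method).
setattr(Registry, "configure_threshold", configure_threshold)

Registry.configure_threshold(10)
print(Registry.sections)  # {'threshold': 10}
```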

+ 95 - 0
src/taipy/config/_config.py

@@ -0,0 +1,95 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from copy import copy
+from typing import Dict
+
+from .global_app.global_app_config import GlobalAppConfig
+from .section import Section
+from .unique_section import UniqueSection
+
+
+class _Config:
+    DEFAULT_KEY = "default"
+
+    def __init__(self):
+        self._sections: Dict[str, Dict[str, Section]] = {}
+        self._unique_sections: Dict[str, UniqueSection] = {}
+        self._global_config: GlobalAppConfig = GlobalAppConfig()
+
+    def _clean(self):
+        self._global_config._clean()
+        for unique_section in self._unique_sections.values():
+            unique_section._clean()
+        for sections in self._sections.values():
+            for section in sections.values():
+                section._clean()
+
+    @classmethod
+    def _default_config(cls):
+        config = _Config()
+        config._global_config = GlobalAppConfig.default_config()
+        return config
+
+    def _update(self, other_config):
+        self._global_config._update(other_config._global_config._to_dict())
+        if other_config._unique_sections:
+            for section_name, other_section in other_config._unique_sections.items():
+                if section := self._unique_sections.get(section_name, None):
+                    section._update(other_section._to_dict())
+                else:
+                    self._unique_sections[section_name] = copy(other_config._unique_sections[section_name])
+        if other_config._sections:
+            for section_name, other_non_unique_sections in other_config._sections.items():
+                if non_unique_sections := self._sections.get(section_name, None):
+                    self.__update_sections(non_unique_sections, other_non_unique_sections)
+                else:
+                    self._sections[section_name] = {}
+                    self.__add_sections(self._sections[section_name], other_non_unique_sections)
+
+    def __add_sections(self, entity_config, other_entity_configs):
+        for cfg_id, sub_config in other_entity_configs.items():
+            entity_config[cfg_id] = copy(sub_config)
+            self.__point_nested_section_to_self(sub_config)
+
+    def __update_sections(self, entity_config, other_entity_configs):
+        if self.DEFAULT_KEY in other_entity_configs:
+            if self.DEFAULT_KEY in entity_config:
+                entity_config[self.DEFAULT_KEY]._update(other_entity_configs[self.DEFAULT_KEY]._to_dict())
+            else:
+                entity_config[self.DEFAULT_KEY] = other_entity_configs[self.DEFAULT_KEY]
+        for cfg_id, sub_config in other_entity_configs.items():
+            if cfg_id != self.DEFAULT_KEY:
+                if cfg_id in entity_config:
+                    entity_config[cfg_id]._update(sub_config._to_dict(), entity_config.get(self.DEFAULT_KEY))
+                else:
+                    entity_config[cfg_id] = copy(sub_config)
+                    entity_config[cfg_id]._update(sub_config._to_dict(), entity_config.get(self.DEFAULT_KEY))
+            self.__point_nested_section_to_self(sub_config)
+
+    def __point_nested_section_to_self(self, section):
+        """Loop through the attributes of a Section to find any attribute whose value is a list of Sections.
+        If one is found, replace each nested Section with the corresponding instance held in self.
+
+        Args:
+            section (Section): The Section to search for nested sections.
+        """
+        for _, attr_value in vars(section).items():
+            # ! This will fail if an attribute is a dictionary, or nested list of Sections.
+            if not isinstance(attr_value, list):
+                continue
+
+            for index, item in enumerate(attr_value):
+                if not isinstance(item, Section):
+                    continue
+
+                if sub_item := self._sections.get(item.name, {}).get(item.id, None):
+                    attr_value[index] = sub_item
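
The merge rule implemented by `_update`/`__update_sections` above gives precedence to the incoming configuration while letting non-default entries fall back to the `default` entry. The sketch below replays that precedence with plain dicts; real Sections expose `_update()`/`_to_dict()`, and the exact fallback behaviour lives inside `Section._update`, so treat this as an approximation. The section and attribute names are made up.

```python
# Plain-dict illustration of the section-merge precedence (approximation only).
DEFAULT = "default"


def update_sections(mine: dict, theirs: dict) -> dict:
    merged = {cfg_id: dict(values) for cfg_id, values in mine.items()}
    if DEFAULT in theirs:
        merged.setdefault(DEFAULT, {}).update(theirs[DEFAULT])
    for cfg_id, values in theirs.items():
        if cfg_id == DEFAULT:
            continue
        base = dict(merged.get(DEFAULT, {}))  # non-default entries fall back to the default
        base.update(merged.get(cfg_id, {}))
        base.update(values)
        merged[cfg_id] = base
    return merged


print(update_sections(
    {"default": {"scope": "SCENARIO"}},
    {"default": {"validity": "1d"}, "sales": {"scope": "GLOBAL"}},
))
# {'default': {'scope': 'SCENARIO', 'validity': '1d'},
#  'sales': {'scope': 'GLOBAL', 'validity': '1d'}}
```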

+ 10 - 0
src/taipy/config/_config_comparator/__init__.py

@@ -0,0 +1,10 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.

+ 162 - 0
src/taipy/config/_config_comparator/_comparator_result.py

@@ -0,0 +1,162 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import re
+from typing import Dict, List, Set
+
+from .._serializer._json_serializer import _JsonSerializer
+
+
+class _ComparatorResult(dict):
+    ADDED_ITEMS_KEY = "added_items"
+    REMOVED_ITEMS_KEY = "removed_items"
+    MODIFIED_ITEMS_KEY = "modified_items"
+
+    CONFLICTED_SECTION_KEY = "conflicted_sections"
+    UNCONFLICTED_SECTION_KEY = "unconflicted_sections"
+
+    def __init__(self, unconflicted_sections: Set[str]):
+        super().__init__()
+
+        self._unconflicted_sections = unconflicted_sections
+
+    def _sort_by_section(self):
+        if self.get(self.CONFLICTED_SECTION_KEY):
+            for key in self[self.CONFLICTED_SECTION_KEY].keys():
+                self[self.CONFLICTED_SECTION_KEY][key].sort(key=lambda x: x[0][0])
+
+        if self.get(self.UNCONFLICTED_SECTION_KEY):
+            for key in self[self.UNCONFLICTED_SECTION_KEY].keys():
+                self[self.UNCONFLICTED_SECTION_KEY][key].sort(key=lambda x: x[0][0])
+
+    def _check_added_items(self, config_deepdiff, new_json_config):
+        if dictionary_item_added := config_deepdiff.get("dictionary_item_added"):
+            for item_added in dictionary_item_added:
+                section_name, config_id, attribute = self.__get_changed_entity_attribute(item_added)
+                diff_sections = self.__get_section(section_name)
+
+                if attribute:
+                    value_added = new_json_config[section_name][config_id][attribute]
+                elif config_id:
+                    value_added = new_json_config[section_name][config_id]
+                else:
+                    value_added = new_json_config[section_name]
+
+                section_name = self.__rename_global_node_name(section_name)
+                self.__create_or_append_list(
+                    diff_sections,
+                    self.ADDED_ITEMS_KEY,
+                    ((section_name, config_id, attribute), (value_added)),
+                )
+
+    def _check_removed_items(self, config_deepdiff, old_json_config):
+        if dictionary_item_removed := config_deepdiff.get("dictionary_item_removed"):
+            for item_removed in dictionary_item_removed:
+                section_name, config_id, attribute = self.__get_changed_entity_attribute(item_removed)
+                diff_sections = self.__get_section(section_name)
+
+                if attribute:
+                    value_removed = old_json_config[section_name][config_id][attribute]
+                elif config_id:
+                    value_removed = old_json_config[section_name][config_id]
+                else:
+                    value_removed = old_json_config[section_name]
+
+                section_name = self.__rename_global_node_name(section_name)
+                self.__create_or_append_list(
+                    diff_sections,
+                    self.REMOVED_ITEMS_KEY,
+                    ((section_name, config_id, attribute), (value_removed)),
+                )
+
+    def _check_modified_items(self, config_deepdiff, old_json_config, new_json_config):
+        if values_changed := config_deepdiff.get("values_changed"):
+            for item_changed, value_changed in values_changed.items():
+                section_name, config_id, attribute = self.__get_changed_entity_attribute(item_changed)
+                diff_sections = self.__get_section(section_name)
+
+                section_name = self.__rename_global_node_name(section_name)
+                self.__create_or_append_list(
+                    diff_sections,
+                    self.MODIFIED_ITEMS_KEY,
+                    ((section_name, config_id, attribute), (value_changed["old_value"], value_changed["new_value"])),
+                )
+
+        # Iterable item added will be considered a modified item
+        if iterable_item_added := config_deepdiff.get("iterable_item_added"):
+            self.__check_modified_iterable(iterable_item_added, old_json_config, new_json_config)
+
+        # Iterable item removed will be considered a modified item
+        if iterable_item_removed := config_deepdiff.get("iterable_item_removed"):
+            self.__check_modified_iterable(iterable_item_removed, old_json_config, new_json_config)
+
+    def __check_modified_iterable(self, iterable_items, old_json_config, new_json_config):
+        for item in iterable_items:
+            section_name, config_id, attribute = self.__get_changed_entity_attribute(item)
+            diff_sections = self.__get_section(section_name)
+
+            if attribute:
+                new_value = new_json_config[section_name][config_id][attribute]
+                old_value = old_json_config[section_name][config_id][attribute]
+            else:
+                new_value = new_json_config[section_name][config_id]
+                old_value = old_json_config[section_name][config_id]
+
+            section_name = self.__rename_global_node_name(section_name)
+            modified_value = ((section_name, config_id, attribute), (old_value, new_value))
+
+            if (
+                not diff_sections.get(self.MODIFIED_ITEMS_KEY)
+                or modified_value not in diff_sections[self.MODIFIED_ITEMS_KEY]
+            ):
+                self.__create_or_append_list(
+                    diff_sections,
+                    self.MODIFIED_ITEMS_KEY,
+                    modified_value,
+                )
+
+    def __get_section(self, section_name: str) -> Dict[str, List]:
+        if section_name in self._unconflicted_sections:
+            if not self.get(self.UNCONFLICTED_SECTION_KEY):
+                self[self.UNCONFLICTED_SECTION_KEY] = {}
+            return self[self.UNCONFLICTED_SECTION_KEY]
+
+        if not self.get(self.CONFLICTED_SECTION_KEY):
+            self[self.CONFLICTED_SECTION_KEY] = {}
+        return self[self.CONFLICTED_SECTION_KEY]
+
+    def __create_or_append_list(self, diff_dict, key, value):
+        if diff_dict.get(key):
+            diff_dict[key].append(value)
+        else:
+            diff_dict[key] = [value]
+
+    def __get_changed_entity_attribute(self, attribute_bracket_notation):
+        """Extract the section name, the config id (if any), and the attribute name (if any)
+        from a JSON bracket-notation path.
+        """
+        try:
+            section_name, config_id, attribute = re.findall(r"\[\'(.*?)\'\]", attribute_bracket_notation)
+        except ValueError:
+            try:
+                section_name, config_id = re.findall(r"\[\'(.*?)\'\]", attribute_bracket_notation)
+                attribute = None
+            except ValueError:
+                section_name = re.findall(r"\[\'(.*?)\'\]", attribute_bracket_notation)[0]
+                config_id = None
+                attribute = None
+
+        return section_name, config_id, attribute
+
+    def __rename_global_node_name(self, node_name):
+        if node_name == _JsonSerializer._GLOBAL_NODE_NAME:
+            return "Global Configuration"
+        return node_name
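
For context, DeepDiff reports changes as bracket-notation paths, and `__get_changed_entity_attribute` splits them with the regular expression shown above. The sketch below shows the same pattern on a made-up path (section "TASK", config id "my_task", attribute "inputs" are illustrative names only).

```python
import re

# Same pattern as in __get_changed_entity_attribute; the path is made up.
path = "root['TASK']['my_task']['inputs']"
print(re.findall(r"\[\'(.*?)\'\]", path))  # ['TASK', 'my_task', 'inputs']
```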

+ 183 - 0
src/taipy/config/_config_comparator/_config_comparator.py

@@ -0,0 +1,183 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import json
+from copy import copy
+from typing import Optional, Set, Union
+
+from deepdiff import DeepDiff
+
+from ...logger._taipy_logger import _TaipyLogger
+from .._config import _Config
+from .._serializer._json_serializer import _JsonSerializer
+from ._comparator_result import _ComparatorResult
+
+
+class _ConfigComparator:
+    def __init__(self):
+        self._unconflicted_sections: Set[str] = set()
+        self.__logger = _TaipyLogger._get_logger()
+
+    def _add_unconflicted_section(self, section_name: Union[str, Set[str]]):
+        if isinstance(section_name, str):
+            section_name = {section_name}
+
+        self._unconflicted_sections.update(section_name)
+
+    def _find_conflict_config(
+        self,
+        old_config: _Config,
+        new_config: _Config,
+        old_version_number: Optional[str] = None,
+        new_version_number: Optional[str] = None,
+    ):
+        """Compare two _Config objects to check for compatibility.
+
+        Args:
+            old_config (_Config): The old _Config.
+            new_config (_Config): The new _Config.
+            old_version_number (str, optional): The old version number for logging. Defaults to None.
+            new_version_number (str, optional): The new version number for logging. Defaults to None.
+
+        Returns:
+            _ComparatorResult: A _ComparatorResult dictionary with the following format:
+        ```python
+        {
+            "added_items": [
+                ((section_name_1, config_id_1, attribute_1), added_object_1),
+                ((section_name_2, config_id_2, attribute_2), added_object_2),
+            ],
+            "removed_items": [
+                ((section_name_1, config_id_1, attribute_1), removed_object_1),
+                ((section_name_2, config_id_2, attribute_2), removed_object_2),
+            ],
+            "modified_items": [
+                ((section_name_1, config_id_1, attribute_1), (old_value_1, new_value_1)),
+                ((section_name_2, config_id_2, attribute_2), (old_value_2, new_value_2)),
+            ],
+        }
+        ```
+        """
+        comparator_result = self.__get_config_diff(old_config, new_config)
+        self.__log_find_conflict_message(comparator_result, old_version_number, new_version_number)
+        return comparator_result
+
+    def _compare(
+        self,
+        config_1: _Config,
+        config_2: _Config,
+        version_number_1: str,
+        version_number_2: str,
+    ):
+        """Compare two _Config objects to check for compatibility.
+
+        Args:
+            config_1 (_Config): The old _Config.
+            config_2 (_Config): The new _Config.
+            version_number_1 (str): The old version number for logging.
+            version_number_2 (str): The new version number for logging.
+        """
+        comparator_result = self.__get_config_diff(config_1, config_2)
+        self.__log_comparison_message(comparator_result, version_number_1, version_number_2)
+
+        return comparator_result
+
+    def __get_config_diff(self, config_1, config_2):
+        json_config_1 = json.loads(_JsonSerializer._serialize(config_1))
+        json_config_2 = json.loads(_JsonSerializer._serialize(config_2))
+
+        config_deepdiff = DeepDiff(json_config_1, json_config_2, ignore_order=True)
+
+        comparator_result = _ComparatorResult(copy(self._unconflicted_sections))
+
+        comparator_result._check_added_items(config_deepdiff, json_config_2)
+        comparator_result._check_removed_items(config_deepdiff, json_config_1)
+        comparator_result._check_modified_items(config_deepdiff, json_config_1, json_config_2)
+        comparator_result._sort_by_section()
+
+        return comparator_result
+
+    def __log_comparison_message(
+        self,
+        comparator_result: _ComparatorResult,
+        version_number_1: str,
+        version_number_2: str,
+    ):
+        config_str_1 = f"version {version_number_1} Configuration"
+        config_str_2 = f"version {version_number_2} Configuration"
+
+        diff_messages = []
+        for _, sections in comparator_result.items():
+            # Accumulate messages from every section instead of keeping only the last one.
+            diff_messages.extend(self.__get_messages(sections))
+
+        if diff_messages:
+            self.__logger.info(
+                f"Differences between {config_str_1} and {config_str_2}:\n\t" + "\n\t".join(diff_messages)
+            )
+        else:
+            self.__logger.info(f"There is no difference between {config_str_1} and {config_str_2}.")
+
+    def __log_find_conflict_message(
+        self,
+        comparator_result: _ComparatorResult,
+        old_version_number: Optional[str] = None,
+        new_version_number: Optional[str] = None,
+    ):
+        old_config_str = (
+            f"configuration for version {old_version_number}" if old_version_number else "current configuration"
+        )
+        new_config_str = (
+            f"configuration for version {new_version_number}" if new_version_number else "current configuration"
+        )
+
+        if unconflicted_sections := comparator_result.get(_ComparatorResult.UNCONFLICTED_SECTION_KEY):
+            unconflicted_messages = self.__get_messages(unconflicted_sections)
+            self.__logger.info(
+                f"There are non-conflicting changes between the {old_config_str}"
+                f" and the {new_config_str}:\n\t" + "\n\t".join(unconflicted_messages)
+            )
+
+        if conflicted_sections := comparator_result.get(_ComparatorResult.CONFLICTED_SECTION_KEY):
+            conflicted_messages = self.__get_messages(conflicted_sections)
+            self.__logger.error(
+                f"The {old_config_str} conflicts with the {new_config_str}:\n\t" + "\n\t".join(conflicted_messages)
+            )
+
+    def __get_messages(self, diff_sections):
+        dq = '"'
+        messages = []
+
+        if added_items := diff_sections.get(_ComparatorResult.ADDED_ITEMS_KEY):
+            for diff in added_items:
+                ((section_name, config_id, attribute), added_object) = diff
+                messages.append(
+                    f"{section_name} {dq}{config_id}{dq} "
+                    f"{f'has attribute {dq}{attribute}{dq}' if attribute else 'was'} added: {added_object}"
+                )
+
+        if removed_items := diff_sections.get(_ComparatorResult.REMOVED_ITEMS_KEY):
+            for diff in removed_items:
+                ((section_name, config_id, attribute), removed_object) = diff
+                messages.append(
+                    f"{section_name} {dq}{config_id}{dq} "
+                    f"{f'has attribute {dq}{attribute}{dq}' if attribute else 'was'} removed"
+                )
+
+        if modified_items := diff_sections.get(_ComparatorResult.MODIFIED_ITEMS_KEY):
+            for diff in modified_items:
+                ((section_name, config_id, attribute), (old_value, new_value)) = diff
+                messages.append(
+                    f"{section_name} {dq}{config_id}{dq} "
+                    f"{f'has attribute {dq}{attribute}{dq}' if attribute else 'was'} modified: "
+                    f"{old_value} -> {new_value}"
+                )
+
+        return messages
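
The comparator itself is a thin layer over DeepDiff: both configurations are serialized to JSON and the resulting dictionaries are diffed, exactly as in `__get_config_diff`. The sketch below shows the DeepDiff call on two made-up JSON-like dicts; the section and config names are illustrative only.

```python
from deepdiff import DeepDiff

# Illustrative input only; real inputs come from _JsonSerializer._serialize(_Config).
old = {"TASK": {"my_task": {"inputs": ["a"]}}}
new = {"TASK": {"my_task": {"inputs": ["a", "b"]}, "other_task": {"inputs": []}}}

diff = DeepDiff(old, new, ignore_order=True)
print(diff.get("dictionary_item_added"))  # path of the new 'other_task' entry
print(diff.get("iterable_item_added"))    # path and value of the new "b" input
```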

+ 14 - 0
src/taipy/config/_init.py

@@ -0,0 +1,14 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from .common.frequency import Frequency
+from .common.scope import Scope
+from .config import Config

+ 10 - 0
src/taipy/config/_serializer/__init__.py

@@ -0,0 +1,10 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.

+ 161 - 0
src/taipy/config/_serializer/_base_serializer.py

@@ -0,0 +1,161 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import inspect
+import re
+import types
+from abc import abstractmethod
+from datetime import datetime, timedelta
+from typing import Any, Dict, Optional
+
+from .._config import _Config
+from ..common._template_handler import _TemplateHandler
+from ..common._validate_id import _validate_id
+from ..common.frequency import Frequency
+from ..common.scope import Scope
+from ..exceptions.exceptions import LoadingError
+from ..global_app.global_app_config import GlobalAppConfig
+from ..section import Section
+from ..unique_section import UniqueSection
+
+
+class _BaseSerializer(object):
+    """Base serializer class for taipy configuration."""
+
+    _GLOBAL_NODE_NAME = "TAIPY"
+    _section_class = {_GLOBAL_NODE_NAME: GlobalAppConfig}
+
+    @classmethod
+    @abstractmethod
+    def _write(cls, configuration: _Config, filename: str):
+        raise NotImplementedError
+
+    @classmethod
+    def _str(cls, configuration: _Config):
+        config_as_dict = {cls._GLOBAL_NODE_NAME: configuration._global_config._to_dict()}
+        for u_sect_name, u_sect in configuration._unique_sections.items():
+            config_as_dict[u_sect_name] = u_sect._to_dict()
+        for sect_name, sections in configuration._sections.items():
+            config_as_dict[sect_name] = cls._to_dict(sections)
+        return cls._stringify(config_as_dict)
+
+    @classmethod
+    def _to_dict(cls, sections: Dict[str, Any]):
+        return {section_id: section._to_dict() for section_id, section in sections.items()}
+
+    @classmethod
+    def _stringify(cls, as_dict):
+        if as_dict is None:
+            return None
+        if isinstance(as_dict, Section):
+            return as_dict.id + ":SECTION"
+        if isinstance(as_dict, Scope):
+            return as_dict.name + ":SCOPE"
+        if isinstance(as_dict, Frequency):
+            return as_dict.name + ":FREQUENCY"
+        if isinstance(as_dict, bool):
+            return str(as_dict) + ":bool"
+        if isinstance(as_dict, int):
+            return str(as_dict) + ":int"
+        if isinstance(as_dict, float):
+            return str(as_dict) + ":float"
+        if isinstance(as_dict, datetime):
+            return as_dict.isoformat() + ":datetime"
+        if isinstance(as_dict, timedelta):
+            return cls._timedelta_to_str(as_dict) + ":timedelta"
+        if inspect.isfunction(as_dict) or isinstance(as_dict, types.BuiltinFunctionType):
+            return as_dict.__module__ + "." + as_dict.__name__ + ":function"
+        if inspect.isclass(as_dict):
+            return as_dict.__module__ + "." + as_dict.__qualname__ + ":class"
+        if isinstance(as_dict, dict):
+            return {str(key): cls._stringify(val) for key, val in as_dict.items()}
+        if isinstance(as_dict, list):
+            return [cls._stringify(val) for val in as_dict]
+        if isinstance(as_dict, tuple):
+            return [cls._stringify(val) for val in as_dict]
+        return as_dict
+
+    @staticmethod
+    def _extract_node(config_as_dict, cls_config, node, config: Optional[Any]) -> Dict[str, Section]:
+        res = {}
+        for key, value in config_as_dict.get(node, {}).items():  # my_task, {input=[], output=[my_data_node], ...}
+            key = _validate_id(key)
+            res[key] = cls_config._from_dict(value, key, config)  # if config is None else cls_config._from_dict(key,
+            # value, config)
+        return res
+
+    @classmethod
+    def _from_dict(cls, as_dict) -> _Config:
+        config = _Config()
+        config._global_config = GlobalAppConfig._from_dict(as_dict.get(cls._GLOBAL_NODE_NAME, {}))
+        for section_name, sect_as_dict in as_dict.items():
+            if section_class := cls._section_class.get(section_name, None):
+                if issubclass(section_class, UniqueSection):
+                    config._unique_sections[section_name] = section_class._from_dict(
+                        sect_as_dict, None, None
+                    )  # type: ignore
+                elif issubclass(section_class, Section):
+                    config._sections[section_name] = cls._extract_node(as_dict, section_class, section_name, config)
+        return config
+
+    @classmethod
+    def _pythonify(cls, val):
+        match = re.fullmatch(_TemplateHandler._PATTERN, str(val))
+        if not match:
+            if isinstance(val, str):
+                TYPE_PATTERN = (
+                    r"^(.+):(\bbool\b|\bstr\b|\bint\b|\bfloat\b|\bdatetime\b|\btimedelta\b|"
+                    r"\bfunction\b|\bclass\b|\bSCOPE\b|\bFREQUENCY\b|\bSECTION\b)?$"
+                )
+                match = re.fullmatch(TYPE_PATTERN, str(val))
+                if match:
+                    actual_val = match.group(1)
+                    dynamic_type = match.group(2)
+                    if dynamic_type == "SECTION":
+                        return actual_val
+                    if dynamic_type == "FREQUENCY":
+                        return Frequency[actual_val]
+                    if dynamic_type == "SCOPE":
+                        return Scope[actual_val]
+                    if dynamic_type == "bool":
+                        return _TemplateHandler._to_bool(actual_val)
+                    elif dynamic_type == "int":
+                        return _TemplateHandler._to_int(actual_val)
+                    elif dynamic_type == "float":
+                        return _TemplateHandler._to_float(actual_val)
+                    elif dynamic_type == "datetime":
+                        return _TemplateHandler._to_datetime(actual_val)
+                    elif dynamic_type == "timedelta":
+                        return _TemplateHandler._to_timedelta(actual_val)
+                    elif dynamic_type == "function":
+                        return _TemplateHandler._to_function(actual_val)
+                    elif dynamic_type == "class":
+                        return _TemplateHandler._to_class(actual_val)
+                    elif dynamic_type == "str":
+                        return actual_val
+                    else:
+                        error_msg = f"Error loading toml configuration at {val}. {dynamic_type} type is not supported."
+                        raise LoadingError(error_msg)
+            if isinstance(val, dict):
+                return {str(k): cls._pythonify(v) for k, v in val.items()}
+            if isinstance(val, list):
+                return [cls._pythonify(v) for v in val]
+        return val
+
+    @classmethod
+    def _timedelta_to_str(cls, obj: timedelta) -> str:
+        total_seconds = obj.total_seconds()
+        return (
+            f"{int(total_seconds // 86400)}d"
+            f"{int(total_seconds % 86400 // 3600)}h"
+            f"{int(total_seconds % 3600 // 60)}m"
+            f"{int(total_seconds % 60)}s"
+        )
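
The serializer's `_stringify`/`_pythonify` pair relies on a type-tag convention: scalar values are suffixed with their type so they can be round-tripped through TOML or JSON. The sketch below shows that convention and replays the arithmetic of `_timedelta_to_str`; it is a standalone illustration, not a call into the library.

```python
from datetime import timedelta

# Type-tag convention: 42 -> "42:int", True -> "True:bool", Scope.SCENARIO -> "SCENARIO:SCOPE".
print(str(42) + ":int")     # 42:int
print(str(True) + ":bool")  # True:bool


# Same arithmetic as _BaseSerializer._timedelta_to_str:
def timedelta_to_str(obj: timedelta) -> str:
    total_seconds = obj.total_seconds()
    return (
        f"{int(total_seconds // 86400)}d"
        f"{int(total_seconds % 86400 // 3600)}h"
        f"{int(total_seconds % 3600 // 60)}m"
        f"{int(total_seconds % 60)}s"
    )


print(timedelta_to_str(timedelta(days=1, hours=2, minutes=3, seconds=4)))  # 1d2h3m4s
```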

+ 43 - 0
src/taipy/config/_serializer/_json_serializer.py

@@ -0,0 +1,43 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import json  # type: ignore
+
+from .._config import _Config
+from ..exceptions.exceptions import LoadingError
+from ._base_serializer import _BaseSerializer
+
+
+class _JsonSerializer(_BaseSerializer):
+    """Convert a configuration between its JSON representation and a Python dict."""
+
+    @classmethod
+    def _write(cls, configuration: _Config, filename: str):
+        with open(filename, "w") as fd:
+            json.dump(cls._str(configuration), fd, ensure_ascii=False, indent=0, check_circular=False)
+
+    @classmethod
+    def _read(cls, filename: str) -> _Config:
+        try:
+            with open(filename) as f:
+                config_as_dict = cls._pythonify(json.load(f))
+            return cls._from_dict(config_as_dict)
+        except json.JSONDecodeError as e:
+            error_msg = f"Cannot load configuration: {e}"
+            raise LoadingError(error_msg)
+
+    @classmethod
+    def _serialize(cls, configuration: _Config) -> str:
+        return json.dumps(cls._str(configuration), ensure_ascii=False, indent=0, check_circular=False)
+
+    @classmethod
+    def _deserialize(cls, config_as_string: str) -> _Config:
+        return cls._from_dict(cls._pythonify(dict(json.loads(config_as_string))))

+ 42 - 0
src/taipy/config/_serializer/_toml_serializer.py

@@ -0,0 +1,42 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import toml  # type: ignore
+
+from .._config import _Config
+from ..exceptions.exceptions import LoadingError
+from ._base_serializer import _BaseSerializer
+
+
+class _TomlSerializer(_BaseSerializer):
+    """Convert a configuration between its TOML representation and a Python dict."""
+
+    @classmethod
+    def _write(cls, configuration: _Config, filename: str):
+        with open(filename, "w") as fd:
+            toml.dump(cls._str(configuration), fd)
+
+    @classmethod
+    def _read(cls, filename: str) -> _Config:
+        try:
+            config_as_dict = cls._pythonify(dict(toml.load(filename)))
+            return cls._from_dict(config_as_dict)
+        except toml.TomlDecodeError as e:
+            error_msg = f"Cannot load configuration: {e}"
+            raise LoadingError(error_msg)
+
+    @classmethod
+    def _serialize(cls, configuration: _Config) -> str:
+        return toml.dumps(cls._str(configuration))
+
+    @classmethod
+    def _deserialize(cls, config_as_string: str) -> _Config:
+        return cls._from_dict(cls._pythonify(dict(toml.loads(config_as_string))))
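
To picture what the TOML output looks like on disk, the sketch below dumps a nested dict whose leaf values carry the type tags produced by `_BaseSerializer._stringify`. The "DATA_NODE" section and its attributes are made up for the example; only the "TAIPY" global node name is taken from the code above.

```python
import toml

# Illustrative shape of a serialized configuration; DATA_NODE and its fields are hypothetical.
config_as_dict = {
    "TAIPY": {},  # global node (_GLOBAL_NODE_NAME)
    "DATA_NODE": {
        "default": {"scope": "SCENARIO:SCOPE", "validity_period": "1d0h0m0s:timedelta"},
    },
}
print(toml.dumps(config_as_dict))
```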

+ 10 - 0
src/taipy/config/checker/__init__.py

@@ -0,0 +1,10 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.

+ 32 - 0
src/taipy/config/checker/_checker.py

@@ -0,0 +1,32 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from typing import List, Type
+
+from ._checkers._config_checker import _ConfigChecker
+from .issue_collector import IssueCollector
+
+
+class _Checker:
+    """Holds the various checkers to perform on the config."""
+
+    _checkers: List[Type[_ConfigChecker]] = []
+
+    @classmethod
+    def _check(cls, _applied_config):
+        collector = IssueCollector()
+        for checker in cls._checkers:
+            checker(_applied_config, collector)._check()
+        return collector
+
+    @classmethod
+    def add_checker(cls, checker_class: Type[_ConfigChecker]):
+        cls._checkers.append(checker_class)
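
Custom checkers are registered by passing the class (not an instance) to `add_checker`; `_check` then instantiates each one with the applied config and a shared collector. The sketch below registers a hypothetical checker; the `_MyChecker` name and the check it performs are made up, and the import paths simply follow the layout added in this change set.

```python
# Hypothetical checker registered through _Checker.add_checker.
from taipy.config.checker._checker import _Checker
from taipy.config.checker._checkers._config_checker import _ConfigChecker
from taipy.config.checker.issue_collector import IssueCollector


class _MyChecker(_ConfigChecker):
    def _check(self) -> IssueCollector:
        # self._config is the _Config instance under inspection.
        if not self._config._global_config._to_dict():
            self._info("global", None, "The global configuration is empty.")
        return self._collector


_Checker.add_checker(_MyChecker)
```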

+ 10 - 0
src/taipy/config/checker/_checkers/__init__.py

@@ -0,0 +1,10 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.

+ 59 - 0
src/taipy/config/checker/_checkers/_auth_config_checker.py

@@ -0,0 +1,59 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from ..._config import _Config
+from ..issue_collector import IssueCollector
+from ._config_checker import _ConfigChecker
+
+
+class _AuthConfigChecker(_ConfigChecker):
+    def __init__(self, config: _Config, collector: IssueCollector):
+        super().__init__(config, collector)
+
+    def _check(self) -> IssueCollector:
+        auth_config = self._config._auth_config  # type: ignore
+        self._check_predefined_protocol(auth_config)
+        return self._collector
+
+    def _check_predefined_protocol(self, auth_config):
+        if auth_config.protocol == auth_config._PROTOCOL_LDAP:
+            self.__check_ldap(auth_config)
+        if auth_config.protocol == auth_config._PROTOCOL_TAIPY:
+            self.__check_taipy(auth_config)
+
+    def __check_taipy(self, auth_config):
+        if auth_config._TAIPY_ROLES not in auth_config.properties:
+            self._error(
+                "properties",
+                auth_config._TAIPY_ROLES,
+                f"`{auth_config._TAIPY_ROLES}` property must be populated when {auth_config._PROTOCOL_TAIPY} is used.",
+            )
+        if auth_config._TAIPY_PWD not in auth_config.properties:
+            self._warning(
+                "properties",
+                auth_config._TAIPY_PWD,
+                f"In order to protect authentication with passwords using the {auth_config._PROTOCOL_TAIPY} protocol,"
+                f" the `{auth_config._TAIPY_PWD}` property can be populated.",
+            )
+
+    def __check_ldap(self, auth_config):
+        if auth_config._LDAP_SERVER not in auth_config.properties:
+            self._error(
+                "properties",
+                auth_config._LDAP_SERVER,
+                f"`{auth_config._LDAP_SERVER}` property must be populated when {auth_config._PROTOCOL_LDAP} is used.",
+            )
+        if auth_config._LDAP_BASE_DN not in auth_config.properties:
+            self._error(
+                "properties",
+                auth_config._LDAP_BASE_DN,
+                f"`{auth_config._LDAP_BASE_DN}` property must be populated when {auth_config._PROTOCOL_LDAP} is used.",
+            )

+ 81 - 0
src/taipy/config/checker/_checkers/_config_checker.py

@@ -0,0 +1,81 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import abc
+from typing import Any, List, Optional, Set
+
+from ..._config import _Config
+from ..issue_collector import IssueCollector
+
+
+class _ConfigChecker:
+    _PREDEFINED_PROPERTIES_KEYS = ["_entity_owner"]
+
+    def __init__(self, config: _Config, collector):
+        self._collector = collector
+        self._config = config
+
+    @abc.abstractmethod
+    def _check(self) -> IssueCollector:
+        raise NotImplementedError
+
+    def _error(self, field: str, value: Any, message: str):
+        self._collector._add_error(field, value, message, self.__class__.__name__)
+
+    def _warning(self, field: str, value: Any, message: str):
+        self._collector._add_warning(field, value, message, self.__class__.__name__)
+
+    def _info(self, field: str, value: Any, message: str):
+        self._collector._add_info(field, value, message, self.__class__.__name__)
+
+    def _check_children(
+        self,
+        parent_config_class,
+        config_id: str,
+        config_key: str,
+        config_value,
+        child_config_class,
+        can_be_empty: Optional[bool] = False,
+    ):
+        if not config_value and not can_be_empty:
+            self._warning(
+                config_key,
+                config_value,
+                f"{config_key} field of {parent_config_class.__name__} `{config_id}` is empty.",
+            )
+        else:
+            if not (
+                (isinstance(config_value, List) or isinstance(config_value, Set))
+                and all(map(lambda x: isinstance(x, child_config_class), config_value))
+            ):
+                self._error(
+                    config_key,
+                    config_value,
+                    f"{config_key} field of {parent_config_class.__name__} `{config_id}` must be populated with a list "
+                    f"of {child_config_class.__name__} objects.",
+                )
+
+    def _check_existing_config_id(self, config):
+        if not config.id:
+            self._error(
+                "config_id",
+                config.id,
+                f"config_id of {config.__class__.__name__} `{config.id}` is empty.",
+            )
+
+    def _check_if_entity_property_key_used_is_predefined(self, config):
+        for key, value in config._properties.items():
+            if key in self._PREDEFINED_PROPERTIES_KEYS:
+                self._error(
+                    key,
+                    value,
+                    f"Properties of {config.__class__.__name__} `{config.id}` cannot have `{key}` as its property.",
+                )

+ 42 - 0
src/taipy/config/checker/issue.py

@@ -0,0 +1,42 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from dataclasses import dataclass
+from typing import Any, Optional
+
+
+@dataclass
+class Issue:
+    """
+    An issue detected in the configuration.
+
+    Attributes:
+        level (str): Level of the issue: one of ERROR, WARNING, or INFO.
+        field (str): Configuration field on which the issue has been detected.
+        value (Any): Value of the field on which the issue has been detected.
+        message (str): Human-readable message to help the user fix the issue.
+        tag (Optional[str]): Optional tag used to filter issues.
+    """
+
+    level: str
+    field: str
+    value: Any
+    message: str
+    tag: Optional[str]
+
+    def __str__(self) -> str:
+        message = self.message
+
+        if self.value:
+            current_value_str = f'"{self.value}"' if isinstance(self.value, str) else f"{self.value}"
+            message += f" Current value of property `{self.field}` is {current_value_str}."
+
+        return message
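
The `__str__` implementation above appends the offending field and value to the message. The sketch below constructs an `Issue` by hand to show the rendered output; the field, value, and checker names are made up.

```python
# Constructing an Issue by hand; "storage_type"/"csvx"/"MyChecker" are hypothetical.
from taipy.config.checker.issue import Issue

issue = Issue("ERROR", "storage_type", "csvx", "storage_type is not supported.", "MyChecker")
print(issue)
# storage_type is not supported. Current value of property `storage_type` is "csvx".
```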

+ 60 - 0
src/taipy/config/checker/issue_collector.py

@@ -0,0 +1,60 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from typing import Any, List
+
+from .issue import Issue
+
+
+class IssueCollector:
+    """
+    A collection of issues (instances of class `Issue^`).
+
+    Attributes:
+        errors (List[Issue^]): List of ERROR issues collected.
+        warnings (List[Issue^]): List of WARNING issues collected.
+        infos (List[Issue^]): List of INFO issues collected.
+        all (List[Issue^]): List of all issues collected, ordered by decreasing level (ERROR, WARNING, then INFO).
+    """
+
+    _ERROR_LEVEL = "ERROR"
+    _WARNING_LEVEL = "WARNING"
+    _INFO_LEVEL = "INFO"
+
+    def __init__(self):
+        self._errors: List[Issue] = []
+        self._warnings: List[Issue] = []
+        self._infos: List[Issue] = []
+
+    @property
+    def all(self) -> List[Issue]:
+        return self._errors + self._warnings + self._infos
+
+    @property
+    def infos(self) -> List[Issue]:
+        return self._infos
+
+    @property
+    def warnings(self) -> List[Issue]:
+        return self._warnings
+
+    @property
+    def errors(self) -> List[Issue]:
+        return self._errors
+
+    def _add_error(self, field: str, value: Any, message: str, checker_name: str):
+        self._errors.append(Issue(self._ERROR_LEVEL, field, value, message, checker_name))
+
+    def _add_warning(self, field: str, value: Any, message: str, checker_name: str):
+        self._warnings.append(Issue(self._WARNING_LEVEL, field, value, message, checker_name))
+
+    def _add_info(self, field: str, value: Any, message: str, checker_name: str):
+        self._infos.append(Issue(self._INFO_LEVEL, field, value, message, checker_name))
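
The sketch below fills an `IssueCollector` the way the checkers do, then reads the public properties back; the field names and checker name are made up.

```python
# Populating an IssueCollector directly; field/checker names are hypothetical.
from taipy.config.checker.issue_collector import IssueCollector

collector = IssueCollector()
collector._add_warning("inputs", [], "inputs field is empty.", "MyChecker")
collector._add_error("config_id", "", "config_id is empty.", "MyChecker")

print(len(collector.errors), len(collector.warnings))  # 1 1
print([issue.level for issue in collector.all])        # ['ERROR', 'WARNING']
```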

+ 10 - 0
src/taipy/config/common/__init__.py

@@ -0,0 +1,10 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.

+ 18 - 0
src/taipy/config/common/_classproperty.py

@@ -0,0 +1,18 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+
+class _Classproperty(object):
+    def __init__(self, f):
+        self.f = f
+
+    def __get__(self, obj, owner):
+        return self.f(owner)
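
`_Classproperty` is a tiny non-data descriptor that always resolves against the owning class. A minimal illustration follows; the `Greeting` class is made up, and the import path follows the layout added in this change set.

```python
# Minimal illustration of the _Classproperty descriptor; Greeting is hypothetical.
from taipy.config.common._classproperty import _Classproperty


class Greeting:
    @_Classproperty
    def label(cls):
        return f"Hello from {cls.__name__}"


print(Greeting.label)    # Hello from Greeting
print(Greeting().label)  # resolved against the class, so also: Hello from Greeting
```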

+ 50 - 0
src/taipy/config/common/_config_blocker.py

@@ -0,0 +1,50 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import functools
+
+from ...logger._taipy_logger import _TaipyLogger
+from ..exceptions.exceptions import ConfigurationUpdateBlocked
+
+
+class _ConfigBlocker:
+    """Configuration blocker singleton."""
+
+    __logger = _TaipyLogger._get_logger()
+    __block_config_update = False
+
+    @classmethod
+    def _block(cls):
+        cls.__block_config_update = True
+
+    @classmethod
+    def _unblock(cls):
+        cls.__block_config_update = False
+
+    @classmethod
+    def _check(cls):
+        def inner(f):
+            @functools.wraps(f)
+            def _check_if_is_blocking(*args, **kwargs):
+                if cls.__block_config_update:
+                    error_message = (
+                        "The Core service should be stopped by running core.stop() before"
+                        " modifying the Configuration. For more information, please refer to:"
+                        " https://docs.taipy.io/en/latest/manuals/running_services/#running-core."
+                    )
+                    cls.__logger.error("ConfigurationUpdateBlocked: " + error_message)
+                    raise ConfigurationUpdateBlocked(error_message)
+
+                return f(*args, **kwargs)
+
+            return _check_if_is_blocking
+
+        return inner
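
The blocker is applied as a decorator factory on configuration mutators: while `_block()` is active, any decorated call raises `ConfigurationUpdateBlocked`. The sketch below shows the intended usage; the `configure_value` function is made up, and the import path follows the layout added in this change set.

```python
# Sketch of guarding a configuration mutator with the blocker; configure_value is hypothetical.
from taipy.config.common._config_blocker import _ConfigBlocker


@_ConfigBlocker._check()
def configure_value(value):
    return value


print(configure_value(1))   # 1 -- updates are allowed while the blocker is inactive
_ConfigBlocker._block()     # e.g. while the Core service is running
# configure_value(2) would now raise ConfigurationUpdateBlocked
_ConfigBlocker._unblock()
```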

+ 20 - 0
src/taipy/config/common/_repr_enum.py

@@ -0,0 +1,20 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import functools
+from enum import Enum
+
+
+class _ReprEnum(Enum):
+    @classmethod
+    @functools.lru_cache
+    def _from_repr(cls, repr_: str):
+        return next(filter(lambda e: repr(e) == repr_, cls))  # type: ignore

+ 151 - 0
src/taipy/config/common/_template_handler.py

@@ -0,0 +1,151 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+import re
+from collections import UserDict
+from datetime import datetime, timedelta
+from importlib import import_module
+from operator import attrgetter
+from pydoc import locate
+
+from ..exceptions.exceptions import InconsistentEnvVariableError, MissingEnvVariableError
+from .frequency import Frequency
+from .scope import Scope
+
+
+class _TemplateHandler:
+    """Factory to handle actions related to config value templating."""
+
+    _PATTERN = r"^ENV\[([a-zA-Z_]\w*)\](:(\bbool\b|\bstr\b|\bfloat\b|\bint\b))?$"
+
+    @classmethod
+    def _replace_templates(cls, template, type=str, required=True, default=None):
+        if isinstance(template, tuple):
+            return tuple(cls._replace_template(item, type, required, default) for item in template)
+        if isinstance(template, list):
+            return [cls._replace_template(item, type, required, default) for item in template]
+        if isinstance(template, dict):
+            return {str(k): cls._replace_template(v, type, required, default) for k, v in template.items()}
+        if isinstance(template, UserDict):
+            return {str(k): cls._replace_template(v, type, required, default) for k, v in template.items()}
+        return cls._replace_template(template, type, required, default)
+
+    @classmethod
+    def _replace_template(cls, template, type, required, default):
+        if "ENV" not in str(template):
+            return template
+        match = re.fullmatch(cls._PATTERN, str(template))
+        if match:
+            var = match.group(1)
+            dynamic_type = match.group(3)
+            val = os.environ.get(var)
+            if val is None:
+                if required:
+                    raise MissingEnvVariableError(f"Environment variable {var} is not set.")
+                return default
+            if type == bool:
+                return cls._to_bool(val)
+            elif type == int:
+                return cls._to_int(val)
+            elif type == float:
+                return cls._to_float(val)
+            elif type == Scope:
+                return cls._to_scope(val)
+            elif type == Frequency:
+                return cls._to_frequency(val)
+            else:
+                if dynamic_type == "bool":
+                    return cls._to_bool(val)
+                elif dynamic_type == "int":
+                    return cls._to_int(val)
+                elif dynamic_type == "float":
+                    return cls._to_float(val)
+                return val
+        return template
+
+    @staticmethod
+    def _to_bool(val: str) -> bool:
+        possible_values = ["true", "false"]
+        if str.lower(val) not in possible_values:
+            raise InconsistentEnvVariableError(f"{val} is not a Boolean.")
+        return str.lower(val) == "true"
+
+    @staticmethod
+    def _to_int(val: str) -> int:
+        try:
+            return int(val)
+        except ValueError:
+            raise InconsistentEnvVariableError(f"{val} is not an integer.")
+
+    @staticmethod
+    def _to_float(val: str) -> float:
+        try:
+            return float(val)
+        except ValueError:
+            raise InconsistentEnvVariableError(f"{val} is not a float.")
+
+    @staticmethod
+    def _to_datetime(val: str) -> datetime:
+        try:
+            return datetime.fromisoformat(val)
+        except ValueError:
+            raise InconsistentEnvVariableError(f"{val} is not a valid datetime.")
+
+    @staticmethod
+    def _to_timedelta(val: str) -> timedelta:
+        """
+        Parse a time string (e.g. "2h13m") into a timedelta object.
+
+        :param val: A string identifying a duration (e.g. "2h13m").
+        :return datetime.timedelta: A datetime.timedelta object.
+        """
+        regex = re.compile(
+            r"^((?P<days>[\.\d]+?)d)? *"
+            r"((?P<hours>[\.\d]+?)h)? *"
+            r"((?P<minutes>[\.\d]+?)m)? *"
+            r"((?P<seconds>[\.\d]+?)s)?$"
+        )
+        parts = regex.match(val)
+        if not parts:
+            raise InconsistentEnvVariableError(f"{val} is not a valid timedelta.")
+        time_params = {name: float(param) for name, param in parts.groupdict().items() if param}
+        return timedelta(**time_params)  # type: ignore
+
+    @staticmethod
+    def _to_scope(val: str) -> Scope:
+        try:
+            return Scope[str.upper(val)]
+        except Exception:
+            raise InconsistentEnvVariableError(f"{val} is not a valid scope.")
+
+    @staticmethod
+    def _to_frequency(val: str) -> Frequency:
+        try:
+            return Frequency[str.upper(val)]
+        except Exception:
+            raise InconsistentEnvVariableError(f"{val} is not a valid frequency.")
+
+    @staticmethod
+    def _to_function(val: str):
+        module_name, fct_name = val.rsplit(".", 1)
+        try:
+            module = import_module(module_name)
+            return attrgetter(fct_name)(module)
+        except Exception:
+            raise InconsistentEnvVariableError(f"{val} is not a valid function.")
+
+    @staticmethod
+    def _to_class(val: str):
+        try:
+            return locate(val)
+        except Exception:
+            raise InconsistentEnvVariableError(f"{val} is not a valid class.")
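
In short, `_replace_templates` resolves values of the form `ENV[VAR_NAME]`, optionally suffixed with `:bool`, `:str`, `:int`, or `:float`, against environment variables, and returns any other value unchanged. A small sketch of the expected behavior (import path assumed from the package layout shown in this diff):

    import os

    from taipy.config.common._template_handler import _TemplateHandler

    os.environ["PORT"] = "8080"
    os.environ["DEBUG"] = "true"

    _TemplateHandler._replace_templates("ENV[PORT]:int")    # -> 8080 (int)
    _TemplateHandler._replace_templates("ENV[DEBUG]:bool")  # -> True
    _TemplateHandler._replace_templates("plain value")      # -> "plain value", unchanged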

+ 27 - 0
src/taipy/config/common/_validate_id.py

@@ -0,0 +1,27 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import keyword
+
+from ..exceptions.exceptions import InvalidConfigurationId
+
+__INVALID_TAIPY_ID_TERMS = ["CYCLE", "SCENARIO", "SEQUENCE", "TASK", "DATANODE"]
+
+
+def _validate_id(name: str):
+    for invalid_taipy_id_term in __INVALID_TAIPY_ID_TERMS:
+        if invalid_taipy_id_term in name:
+            raise InvalidConfigurationId(f"{name} is not a valid identifier. {invalid_taipy_id_term} is restricted.")
+
+    if name.isidentifier() and not keyword.iskeyword(name):
+        return name
+
+    raise InvalidConfigurationId(f"{name} is not a valid identifier.")
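
`_validate_id` accepts any valid Python identifier that is not a keyword and does not contain a reserved Taipy term. A couple of illustrative calls (the identifiers are made up; import path assumed from the package layout shown in this diff):

    from taipy.config.common._validate_id import _validate_id

    _validate_id("sales_history")       # returns "sales_history"
    _validate_id("my_SCENARIO_config")  # raises InvalidConfigurationId: contains "SCENARIO"
    _validate_id("class")               # raises InvalidConfigurationId: Python keyword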

+ 45 - 0
src/taipy/config/common/frequency.py

@@ -0,0 +1,45 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from ..common._repr_enum import _ReprEnum
+
+
+class Frequency(_ReprEnum):
+    """Frequency of the recurrence of `Cycle^` and `Scenario^` objects.
+
+    The frequency must be provided in the `ScenarioConfig^`.
+
+    Each recurrent scenario is attached to the cycle corresponding to the creation date and the
+    frequency. In other words, each cycle represents an iteration and contains the various scenarios
+    created during this iteration.
+
+    For instance, when scenarios have a _MONTHLY_ frequency, one cycle will be created for each
+    month (January, February, March, etc.). A new scenario created on February 10th gets
+    attached to the _February_ cycle.
+
+    The frequency is implemented as an enumeration with the following possible values:
+
+    - With a _DAILY_ frequency, a new cycle is created for each day.
+
+    - With a _WEEKLY_ frequency, a new cycle is created for each week (from Monday to Sunday).
+
+    - With a _MONTHLY_ frequency, a new cycle is created for each month.
+
+    - With a _QUARTERLY_ frequency, a new cycle is created for each quarter.
+
+    - With a _YEARLY_ frequency, a new cycle is created for each year.
+    """
+
+    DAILY = 1
+    WEEKLY = 2
+    MONTHLY = 3
+    QUARTERLY = 4
+    YEARLY = 5
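
As the docstring above explains, the frequency is set on the scenario configuration and determines which cycle each scenario joins. A hedged sketch using the `Config.configure_scenario()` signature declared later in this diff (the "sales" id and empty task list are placeholders; taipy-core is assumed to be installed so that the method exists):

    from taipy.config.common.frequency import Frequency
    from taipy.config.config import Config

    # requires taipy-core for configure_scenario (declared in config.pyi below)
    scenario_cfg = Config.configure_scenario("sales", task_configs=[], frequency=Frequency.MONTHLY)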

+ 49 - 0
src/taipy/config/common/scope.py

@@ -0,0 +1,49 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from ..common._repr_enum import _ReprEnum
+
+
+class _OrderedEnum(_ReprEnum):
+    def __ge__(self, other):
+        if self.__class__ is other.__class__:
+            return self.value >= other.value
+        return NotImplemented
+
+    def __gt__(self, other):
+        if self.__class__ is other.__class__:
+            return self.value > other.value
+        return NotImplemented
+
+    def __le__(self, other):
+        if self.__class__ is other.__class__:
+            return self.value <= other.value
+        return NotImplemented
+
+    def __lt__(self, other):
+        if self.__class__ is other.__class__:
+            return self.value < other.value
+        return NotImplemented
+
+
+class Scope(_OrderedEnum):
+    """Scope of a `DataNode^`.
+
+    This enumeration can have the following values:
+
+    - `GLOBAL`
+    - `CYCLE`
+    - `SCENARIO`
+    """
+
+    GLOBAL = 3
+    CYCLE = 2
+    SCENARIO = 1
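
Because `Scope` derives from `_OrderedEnum`, its members compare by breadth, which is exactly what the enumeration values encode (GLOBAL = 3 down to SCENARIO = 1). For example, assuming the import path shown in this diff:

    from taipy.config.common.scope import Scope

    assert Scope.GLOBAL > Scope.CYCLE
    assert Scope.CYCLE >= Scope.SCENARIO
    assert Scope.SCENARIO < Scope.GLOBAL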

+ 250 - 0
src/taipy/config/config.py

@@ -0,0 +1,250 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+from typing import Dict
+
+from ..logger._taipy_logger import _TaipyLogger
+from ._config import _Config
+from ._config_comparator._config_comparator import _ConfigComparator
+from ._serializer._json_serializer import _JsonSerializer
+from ._serializer._toml_serializer import _TomlSerializer
+from .checker._checker import _Checker
+from .checker.issue_collector import IssueCollector
+from .common._classproperty import _Classproperty
+from .common._config_blocker import _ConfigBlocker
+from .global_app.global_app_config import GlobalAppConfig
+from .section import Section
+from .unique_section import UniqueSection
+
+
+class Config:
+    """Configuration singleton."""
+
+    _ENVIRONMENT_VARIABLE_NAME_WITH_CONFIG_PATH = "TAIPY_CONFIG_PATH"
+    __logger = _TaipyLogger._get_logger()
+    _default_config = _Config._default_config()
+    _python_config = _Config()
+    _file_config = _Config()
+    _env_file_config = _Config()
+    _applied_config = _Config()
+    _collector = IssueCollector()
+    _serializer = _TomlSerializer()
+    __json_serializer = _JsonSerializer()
+    _comparator: _ConfigComparator = _ConfigComparator()
+
+    @_Classproperty
+    def unique_sections(cls) -> Dict[str, UniqueSection]:
+        """Return all unique sections."""
+        return cls._applied_config._unique_sections
+
+    @_Classproperty
+    def sections(cls) -> Dict[str, Dict[str, Section]]:
+        """Return all non unique sections."""
+        return cls._applied_config._sections
+
+    @_Classproperty
+    def global_config(cls) -> GlobalAppConfig:
+        """Return configuration values related to the global application as a `GlobalAppConfig^`."""
+        return cls._applied_config._global_config
+
+    @classmethod
+    @_ConfigBlocker._check()
+    def load(cls, filename):
+        """Load a configuration file.
+
+        The current Python configuration is replaced and the Config compilation is triggered.
+
+        Parameters:
+            filename (Union[str, Path]): The path of the toml configuration file to load.
+        """
+        cls.__logger.info(f"Loading configuration. Filename: '{filename}'")
+        cls._python_config = cls._serializer._read(filename)
+        cls._compile_configs()
+        cls.__logger.info(f"Configuration '{filename}' successfully loaded.")
+
+    @classmethod
+    def export(cls, filename):
+        """Export a configuration.
+
+        The export is done in a toml file.
+
+        The exported configuration is taken from the Python code configuration.
+
+        Parameters:
+            filename (Union[str, Path]): The path of the file to export.
+        Note:
+            If *filename* already exists, it is overwritten.
+        """
+        cls._serializer._write(cls._python_config, filename)
+
+    @classmethod
+    def backup(cls, filename):
+        """Backup a configuration.
+
+        The backup is done in a toml file.
+
+        The backed up configuration is a compilation from the three possible methods to configure
+        the application: the Python code configuration, the file configuration and the environment
+        configuration.
+
+        Parameters:
+            filename (Union[str, Path]): The path of the file to export.
+        Note:
+            If *filename* already exists, it is overwritten.
+        """
+        cls._serializer._write(cls._applied_config, filename)
+
+    @classmethod
+    @_ConfigBlocker._check()
+    def restore(cls, filename):
+        """Restore a configuration file and replace the current applied configuration.
+
+        Parameters:
+            filename (Union[str, Path]): The path of the toml configuration file to load.
+        """
+        cls.__logger.info(f"Restoring configuration. Filename: '{filename}'")
+        cls._applied_config = cls._serializer._read(filename)
+        cls.__logger.info(f"Configuration '{filename}' successfully restored.")
+
+    @classmethod
+    @_ConfigBlocker._check()
+    def override(cls, filename):
+        """Load a configuration from a file and override the current config.
+
+        Parameters:
+            filename (Union[str, Path]): The path of the toml configuration file to load.
+        """
+        cls.__logger.info(f"Loading configuration. Filename: '{filename}'")
+        cls._file_config = cls._serializer._read(filename)
+        cls.__logger.info("Overriding configuration.")
+        cls._compile_configs()
+        cls.__logger.info(f"Configuration '{filename}' successfully loaded.")
+
+    @classmethod
+    def block_update(cls):
+        """Block updates to the configuration singleton."""
+        _ConfigBlocker._block()
+
+    @classmethod
+    def unblock_update(cls):
+        """Unblock updates to the configuration singleton."""
+        _ConfigBlocker._unblock()
+
+    @classmethod
+    @_ConfigBlocker._check()
+    def configure_global_app(cls, **properties) -> GlobalAppConfig:
+        """Configure the global application.
+
+        Parameters:
+            **properties (Dict[str, Any]): A dictionary of additional properties.
+        Returns:
+            The global application configuration.
+        """
+        glob_cfg = GlobalAppConfig(**properties)
+        if cls._python_config._global_config is None:
+            cls._python_config._global_config = glob_cfg
+        else:
+            cls._python_config._global_config._update(glob_cfg._to_dict())
+        cls._compile_configs()
+        return cls._applied_config._global_config
+
+    @classmethod
+    def check(cls) -> IssueCollector:
+        """Check configuration.
+
+        This method logs issue messages and returns an issue collector.
+
+        Returns:
+            Collector containing the info, warning and error issues.
+        """
+        cls._collector = _Checker._check(cls._applied_config)
+        cls.__log_message(cls)
+        return cls._collector
+
+    @classmethod
+    @_ConfigBlocker._check()
+    def _register_default(cls, default_section: Section):
+        if isinstance(default_section, UniqueSection):
+            if cls._default_config._unique_sections.get(default_section.name, None):
+                cls._default_config._unique_sections[default_section.name]._update(default_section._to_dict())
+            else:
+                cls._default_config._unique_sections[default_section.name] = default_section
+        else:
+            if def_sections := cls._default_config._sections.get(default_section.name, None):
+                def_sections[default_section.id] = default_section
+            else:
+                cls._default_config._sections[default_section.name] = {default_section.id: default_section}
+        cls._serializer._section_class[default_section.name] = default_section.__class__  # type: ignore
+        cls.__json_serializer._section_class[default_section.name] = default_section.__class__  # type: ignore
+        cls._compile_configs()
+
+    @classmethod
+    @_ConfigBlocker._check()
+    def _register(cls, section):
+        if isinstance(section, UniqueSection):
+            if cls._python_config._unique_sections.get(section.name, None):
+                cls._python_config._unique_sections[section.name]._update(section._to_dict())
+            else:
+                cls._python_config._unique_sections[section.name] = section
+        else:
+            if sections := cls._python_config._sections.get(section.name, None):
+                if sections.get(section.id, None):
+                    sections[section.id]._update(section._to_dict())
+                else:
+                    sections[section.id] = section
+            else:
+                cls._python_config._sections[section.name] = {section.id: section}
+        cls._serializer._section_class[section.name] = section.__class__
+        cls.__json_serializer._section_class[section.name] = section.__class__
+        cls._compile_configs()
+
+    @classmethod
+    def _override_env_file(cls):
+        if config_filename := os.environ.get(cls._ENVIRONMENT_VARIABLE_NAME_WITH_CONFIG_PATH):
+            cls.__logger.info(f"Loading configuration provided by environment variable. Filename: '{config_filename}'")
+            cls._env_file_config = cls._serializer._read(config_filename)
+            cls.__logger.info(f"Configuration '{config_filename}' successfully loaded.")
+
+    @classmethod
+    def _compile_configs(cls):
+        Config._override_env_file()
+        cls._applied_config._clean()
+        if cls._default_config:
+            cls._applied_config._update(cls._default_config)
+        if cls._python_config:
+            cls._applied_config._update(cls._python_config)
+        if cls._file_config:
+            cls._applied_config._update(cls._file_config)
+        if cls._env_file_config:
+            cls._applied_config._update(cls._env_file_config)
+
+    @classmethod
+    def __log_message(cls, config):
+        for issue in config._collector._warnings:
+            cls.__logger.warning(str(issue))
+        for issue in config._collector._infos:
+            cls.__logger.info(str(issue))
+        for issue in config._collector._errors:
+            cls.__logger.error(str(issue))
+        if len(config._collector._errors) != 0:
+            raise SystemExit("Configuration errors found. Please check the error log for more information.")
+
+    @classmethod
+    def _to_json(cls, _config: _Config) -> str:
+        return cls.__json_serializer._serialize(_config)
+
+    @classmethod
+    def _from_json(cls, config_as_str: str) -> _Config:
+        return cls.__json_serializer._deserialize(config_as_str)
+
+
+Config._override_env_file()
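
Putting the pieces together, a typical interaction with the `Config` singleton registers settings in Python code, optionally overrides them from a toml file, runs the checker, and blocks further updates while the application is running. A hedged sketch (file names and the "environment" custom property are placeholders; the import path is assumed from the package layout shown in this diff):

    from taipy.config.config import Config

    Config.configure_global_app(environment="staging")  # custom property passed via **properties
    Config.override("config_override.toml")             # optional file-based override
    Config.check()                                       # logs issues; raises SystemExit on errors
    Config.backup("applied_config.toml")                 # snapshot of the compiled configuration

    Config.block_update()                                # e.g. while the Core service is running
    # ... application runs ...
    Config.unblock_update()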

+ 836 - 0
src/taipy/config/config.pyi

@@ -0,0 +1,836 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import json
+from datetime import timedelta
+from typing import Any, Callable, Dict, List, Optional, Union
+
+from taipy.config._config import _Config
+from taipy.core.config import CoreSection, DataNodeConfig, JobConfig, MigrationConfig, ScenarioConfig, TaskConfig
+
+from .checker.issue_collector import IssueCollector
+from .common._classproperty import _Classproperty
+from .common._config_blocker import _ConfigBlocker
+from .common.frequency import Frequency
+from .common.scope import Scope
+from .global_app.global_app_config import GlobalAppConfig
+from .section import Section
+from .unique_section import UniqueSection
+
+class Config:
+    """Configuration singleton."""
+
+    @_Classproperty
+    def unique_sections(cls) -> Dict[str, UniqueSection]:
+        """Return all unique sections."""
+    @_Classproperty
+    def sections(cls) -> Dict[str, Dict[str, Section]]:
+        """Return all non unique sections."""
+    @_Classproperty
+    def global_config(cls) -> GlobalAppConfig:
+        """Return configuration values related to the global application as a `GlobalAppConfig^`."""
+    @classmethod
+    @_ConfigBlocker._check()
+    def load(cls, filename):
+        """Load a configuration file.
+
+        The current Python configuration is replaced and the Config compilation is triggered.
+
+        Parameters:
+            filename (Union[str, Path]): The path of the toml configuration file to load.
+        """
+    @classmethod
+    def export(cls, filename):
+        """Export a configuration.
+
+        The export is done in a toml file.
+
+        The exported configuration is taken from the Python code configuration.
+
+        Parameters:
+            filename (Union[str, Path]): The path of the file to export.
+        Note:
+            If *filename* already exists, it is overwritten.
+        """
+    @classmethod
+    def backup(cls, filename):
+        """Backup a configuration.
+
+        The backup is done in a toml file.
+
+        The backed up configuration is a compilation from the three possible methods to configure
+        the application: the Python code configuration, the file configuration and the environment
+        configuration.
+
+        Parameters:
+            filename (Union[str, Path]): The path of the file to export.
+        Note:
+            If *filename* already exists, it is overwritten.
+        """
+    @classmethod
+    @_ConfigBlocker._check()
+    def restore(cls, filename):
+        """Restore a configuration file and replace the current applied configuration.
+
+        Parameters:
+            filename (Union[str, Path]): The path of the toml configuration file to load.
+        """
+    @classmethod
+    @_ConfigBlocker._check()
+    def override(cls, filename):
+        """Load a configuration from a file and override the current config.
+
+        Parameters:
+            filename (Union[str, Path]): The path of the toml configuration file to load.
+        """
+    @classmethod
+    def block_update(cls):
+        """Block updates to the configuration singleton."""
+    @classmethod
+    def unblock_update(cls):
+        """Unblock updates to the configuration singleton."""
+    @classmethod
+    @_ConfigBlocker._check()
+    def configure_global_app(cls, **properties) -> GlobalAppConfig:
+        """Configure the global application.
+
+        Parameters:
+            **properties (Dict[str, Any]): A dictionary of additional properties.
+        Returns:
+            The global application configuration.
+        """
+    @classmethod
+    def check(cls) -> IssueCollector:
+        """Check configuration.
+
+        This method logs issue messages and returns an issue collector.
+
+        Returns:
+            Collector containing the info, warning and error issues.
+        """
+    @classmethod
+    @_ConfigBlocker._check()
+    def _register_default(cls, default_section: Section):
+        """"""
+    @classmethod
+    @_ConfigBlocker._check()
+    def _register(cls, section):
+        """"""
+    @classmethod
+    def _override_env_file(cls):
+        """"""
+    @classmethod
+    def _compile_configs(cls):
+        """"""
+    @classmethod
+    def _to_json(cls, _config: _Config) -> str:
+        """"""
+    @classmethod
+    def _from_json(cls, config_as_str: str) -> _Config:
+        """"""
+    @_Classproperty
+    def job_config(cls) -> JobConfig:
+        """"""
+    @_Classproperty
+    def data_nodes(cls) -> Dict[str, DataNodeConfig]:
+        """"""
+    @_Classproperty
+    def tasks(cls) -> Dict[str, TaskConfig]:
+        """"""
+    @_Classproperty
+    def scenarios(cls) -> Dict[str, ScenarioConfig]:
+        """"""
+    @_Classproperty
+    def migration_functions(cls) -> Dict[str, MigrationConfig]:
+        """"""
+    @_Classproperty
+    def core(cls) -> Dict[str, CoreSection]:
+        """"""
+    @staticmethod
+    def configure_scenario(
+        id: str,
+        task_configs: Optional[List[TaskConfig]] = None,
+        additional_data_node_configs: Optional[List[DataNodeConfig]] = None,
+        frequency: Optional[Frequency] = None,
+        comparators: Optional[Dict[str, Union[List[Callable], Callable]]] = None,
+        sequences: Optional[Dict[str, List[TaskConfig]]] = None,
+        **properties,
+    ) -> "ScenarioConfig":
+        """Configure a new scenario configuration.
+
+        Parameters:
+            id (str): The unique identifier of the new scenario configuration.
+            task_configs (Optional[List[TaskConfig^]]): The list of task configurations used by this
+                scenario configuration. The default value is None.
+            additional_data_node_configs (Optional[List[DataNodeConfig^]]): The list of additional data nodes
+                related to this scenario configuration. The default value is None.
+            frequency (Optional[Frequency^]): The scenario frequency.<br/>
+                It corresponds to the recurrence of the scenarios instantiated from this
+                configuration. Based on this frequency each scenario will be attached to the
+                relevant cycle.
+            comparators (Optional[Dict[str, Union[List[Callable], Callable]]]): The list of
+                functions used to compare scenarios. A comparator function is attached to a
+                scenario's data node configuration. The key of the dictionary parameter
+                corresponds to the data node configuration id. During the scenarios'
+                comparison, each comparator is applied to all the data nodes instantiated from
+                the data node configuration attached to the comparator. See
+                `(taipy.)compare_scenarios()^` for more details.
+            sequences (Optional[Dict[str, List[TaskConfig]]]): Dictionary of sequence descriptions.
+                The default value is None.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The new scenario configuration.
+        """
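+        # Illustrative usage (editor's sketch, not part of the original stub); the id and the
+        # task configurations are hypothetical:
+        #
+        #     scenario_cfg = Config.configure_scenario(
+        #         "monthly_sales",
+        #         task_configs=[train_task_cfg, predict_task_cfg],
+        #         frequency=Frequency.MONTHLY,
+        #         sequences={"training": [train_task_cfg]},
+        #     )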
+    @staticmethod
+    def set_default_scenario_configuration(
+        task_configs: Optional[List[TaskConfig]] = None,
+        additional_data_node_configs: Optional[List[DataNodeConfig]] = None,
+        frequency: Optional[Frequency] = None,
+        comparators: Optional[Dict[str, Union[List[Callable], Callable]]] = None,
+        sequences: Optional[Dict[str, List[TaskConfig]]] = None,
+        **properties,
+    ) -> "ScenarioConfig":
+        """Set the default values for scenario configurations.
+
+        This function creates the *default scenario configuration* object,
+        where all scenario configuration objects will find their default
+        values when needed.
+
+        Parameters:
+            task_configs (Optional[List[TaskConfig^]]): The list of task configurations used by this
+                scenario configuration.
+            additional_data_node_configs (Optional[List[DataNodeConfig^]]): The list of additional data nodes
+                related to this scenario configuration.
+            frequency (Optional[Frequency^]): The scenario frequency.
+                It corresponds to the recurrence of the scenarios instantiated from this
+                configuration. Based on this frequency each scenario will be attached to
+                the relevant cycle.
+            comparators (Optional[Dict[str, Union[List[Callable], Callable]]]): The list of
+                functions used to compare scenarios. A comparator function is attached to a
+                scenario's data node configuration. The key of the dictionary parameter
+                corresponds to the data node configuration id. During the scenarios'
+                comparison, each comparator is applied to all the data nodes instantiated from
+                the data node configuration attached to the comparator. See
+                `taipy.compare_scenarios()^` for more details.
+            sequences (Optional[Dict[str, List[TaskConfig]]]): Dictionary of sequences. The default value is None.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The new default scenario configuration.
+        """
+    @staticmethod
+    def set_default_data_node_configuration(
+        storage_type: str, scope: Optional[Scope] = None, validity_period: Optional[timedelta] = None, **properties
+    ) -> "DataNodeConfig":
+        """Set the default values for data node configurations.
+
+        This function creates the _default data node configuration_ object,
+        where all data node configuration objects will find their default
+        values when needed.
+
+        Parameters:
+            storage_type (str): The default storage type for all data node configurations.
+                The possible values are *"pickle"* (the default value), *"csv"*, *"excel"*,
+                *"sql"*, *"mongo_collection"*, *"in_memory"*, *"json"*, *"parquet"* or
+                *"generic"*.
+            scope (Optional[Scope^]): The default scope for all data node configurations.<br/>
+                The default value is `Scope.SCENARIO`.
+            validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
+                considered up-to-date. Once the validity period has passed, the data node is considered stale and
+                relevant tasks will run even if they are skippable (see the
+                [Task configs page](../core/config/task-config.md) for more details).
+                If *validity_period* is set to None, the data node is always up-to-date.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The default data node configuration.
+        """
+    @classmethod
+    def configure_data_node_from(
+        cls,
+        source_configuration: "DataNodeConfig",
+        id: str,
+        **properties,
+    ) -> "DataNodeConfig":
+        """Configure a new data node configuration from an existing one.
+
+        Parameters:
+            source_configuration (DataNodeConfig): The source data node configuration.
+            id (str): The unique identifier of the new data node configuration.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.<br/>
+                The default properties are the properties of the source data node configuration.
+
+        Returns:
+            The new data node configuration.
+        """
+    @classmethod
+    def configure_data_node(
+        cls,
+        id: str,
+        storage_type: Optional[str] = None,
+        scope: Optional[Scope] = None,
+        validity_period: Optional[timedelta] = None,
+        **properties,
+    ) -> "DataNodeConfig":
+        """Configure a new data node configuration.
+
+        Parameters:
+            id (str): The unique identifier of the new data node configuration.
+            storage_type (Optional[str]): The data node configuration storage type. The possible values
+                are None (in which case the default *"pickle"* storage type is used, unless it has been
+                overloaded by the *storage_type* value set in the default data node configuration;
+                see `(Config.)set_default_data_node_configuration()^`), *"pickle"*, *"csv"*, *"excel"*,
+                *"sql_table"*, *"sql"*, *"json"*, *"parquet"*, *"mongo_collection"*, *"in_memory"*, or
+                *"generic"*.
+            scope (Optional[Scope^]): The scope of the data node configuration.<br/>
+                The default value is `Scope.SCENARIO` (or the one specified in
+                `(Config.)set_default_data_node_configuration()^`).
+            validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
+                considered up-to-date. Once the validity period has passed, the data node is considered stale and
+                relevant tasks will run even if they are skippable (see the
+                [Task configs page](../core/config/task-config.md) for more details).
+                If *validity_period* is set to None, the data node is always up-to-date.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The new data node configuration.
+        """
+    @classmethod
+    def configure_csv_data_node(
+        cls,
+        id: str,
+        default_path: Optional[str] = None,
+        encoding: Optional[str] = None,
+        has_header: Optional[bool] = None,
+        exposed_type: Optional[str] = None,
+        scope: Optional[Scope] = None,
+        validity_period: Optional[timedelta] = None,
+        **properties,
+    ) -> "DataNodeConfig":
+        """Configure a new CSV data node configuration.
+
+        Parameters:
+            id (str): The unique identifier of the new CSV data node configuration.
+            default_path (Optional[str]): The default path of the CSV file.
+            encoding (Optional[str]): The encoding of the CSV file.
+            has_header (Optional[bool]): If True, indicates that the CSV file has a header.
+            exposed_type (Optional[str]): The exposed type of the data read from CSV file.<br/>
+                The default value is `pandas`.
+            scope (Optional[Scope^]): The scope of the CSV data node configuration.<br/>
+                The default value is `Scope.SCENARIO`.
+            validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
+                considered up-to-date. Once the validity period has passed, the data node is considered stale and
+                relevant tasks will run even if they are skippable (see the
+                [Task configs page](../core/config/task-config.md) for more details).
+                If *validity_period* is set to None, the data node is always up-to-date.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The new CSV data node configuration.
+        """
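+        # Illustrative usage (editor's sketch, not part of the original stub); the id and path
+        # are hypothetical:
+        #
+        #     sales_csv_cfg = Config.configure_csv_data_node(
+        #         "sales_history",
+        #         default_path="data/sales.csv",
+        #         has_header=True,
+        #         scope=Scope.GLOBAL,
+        #     )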
+    @classmethod
+    def configure_json_data_node(
+        cls,
+        id: str,
+        default_path: Optional[str] = None,
+        encoding: Optional[str] = None,
+        encoder: Optional[json.JSONEncoder] = None,
+        decoder: Optional[json.JSONDecoder] = None,
+        scope: Optional[Scope] = None,
+        validity_period: Optional[timedelta] = None,
+        **properties,
+    ) -> "DataNodeConfig":
+        """Configure a new JSON data node configuration.
+
+        Parameters:
+            id (str): The unique identifier of the new JSON data node configuration.
+            default_path (Optional[str]): The default path of the JSON file.
+            encoding (Optional[str]): The encoding of the JSON file.
+            encoder (Optional[json.JSONEncoder]): The JSON encoder used to write data into the JSON file.
+            decoder (Optional[json.JSONDecoder]): The JSON decoder used to read data from the JSON file.
+            scope (Optional[Scope^]): The scope of the JSON data node configuration.<br/>
+                The default value is `Scope.SCENARIO`.
+            validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
+                considered up-to-date. Once the validity period has passed, the data node is considered stale and
+                relevant tasks will run even if they are skippable (see the
+                [Task configs page](../core/config/task-config.md) for more details).
+                If *validity_period* is set to None, the data node is always up-to-date.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+        Returns:
+            The new JSON data node configuration.
+        """
+    @classmethod
+    def configure_parquet_data_node(
+        cls,
+        id: str,
+        default_path: Optional[str] = None,
+        engine: Optional[str] = None,
+        compression: Optional[str] = None,
+        read_kwargs: Optional[Dict] = None,
+        write_kwargs: Optional[Dict] = None,
+        exposed_type: Optional[str] = None,
+        scope: Optional[Scope] = None,
+        validity_period: Optional[timedelta] = None,
+        **properties,
+    ) -> "DataNodeConfig":
+        """Configure a new Parquet data node configuration.
+
+        Parameters:
+            id (str): The unique identifier of the new Parquet data node configuration.
+            default_path (Optional[str]): The default path of the Parquet file.
+            engine (Optional[str]): Parquet library to use. Possible values are *"fastparquet"* or
+                *"pyarrow"*.<br/>
+                The default value is *"pyarrow"*.
+            compression (Optional[str]): Name of the compression to use. Possible values are *"snappy"*,
+                *"gzip"*, *"brotli"*, or *"none"* (no compression). The default value is *"snappy"*.
+            read_kwargs (Optional[dict]): Additional parameters passed to the `pandas.read_parquet()`
+                function.
+            write_kwargs (Optional[dict]): Additional parameters passed to the
+                `pandas.DataFrame.to_parquet()` function.<br/>
+                The parameters in *read_kwargs* and *write_kwargs* have a **higher precedence** than the
+                top-level parameters which are also passed to Pandas.
+            exposed_type (Optional[str]): The exposed type of the data read from Parquet file.<br/>
+                The default value is `pandas`.
+            scope (Optional[Scope^]): The scope of the Parquet data node configuration.<br/>
+                The default value is `Scope.SCENARIO`.
+            validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
+                considered up-to-date. Once the validity period has passed, the data node is considered stale and
+                relevant tasks will run even if they are skippable (see the
+                [Task configs page](../core/config/task-config.md) for more details).
+                If *validity_period* is set to None, the data node is always up-to-date.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The new Parquet data node configuration.
+        """
+    @classmethod
+    def configure_excel_data_node(
+        cls,
+        id: str,
+        default_path: Optional[str] = None,
+        has_header: Optional[bool] = None,
+        sheet_name: Optional[Union[List[str], str]] = None,
+        exposed_type: Optional[str] = None,
+        scope: Optional[Scope] = None,
+        validity_period: Optional[timedelta] = None,
+        **properties,
+    ) -> "DataNodeConfig":
+        """Configure a new Excel data node configuration.
+
+        Parameters:
+            id (str): The unique identifier of the new Excel data node configuration.
+            default_path (Optional[str]): The path of the Excel file.
+            has_header (Optional[bool]): If True, indicates that the Excel file has a header.
+            sheet_name (Optional[Union[List[str], str]]): The list of sheet names to be used.
+                This can be a unique name.
+            exposed_type (Optional[str]): The exposed type of the data read from Excel file.<br/>
+                The default value is `pandas`.
+            scope (Optional[Scope^]): The scope of the Excel data node configuration.<br/>
+                The default value is `Scope.SCENARIO`.
+            validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
+                considered up-to-date. Once the validity period has passed, the data node is considered stale and
+                relevant tasks will run even if they are skippable (see the
+                [Task configs page](../core/config/task-config.md) for more details).
+                If *validity_period* is set to None, the data node is always up-to-date.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The new Excel data node configuration.
+        """
+    @classmethod
+    def configure_generic_data_node(
+        cls,
+        id: str,
+        read_fct: Optional[Callable] = None,
+        write_fct: Optional[Callable] = None,
+        read_fct_args: Optional[List] = None,
+        write_fct_args: Optional[List] = None,
+        scope: Optional[Scope] = None,
+        validity_period: Optional[timedelta] = None,
+        **properties,
+    ) -> "DataNodeConfig":
+        """Configure a new generic data node configuration.
+
+        Parameters:
+            id (str): The unique identifier of the new generic data node configuration.
+            read_fct (Optional[Callable]): The Python function called to read the data.
+            write_fct (Optional[Callable]): The Python function called to write the data.
+                The provided function must have at least one parameter that receives the data to be written.
+            read_fct_args (Optional[List]): The list of arguments that are passed to the function
+                *read_fct* to read data.
+            write_fct_args (Optional[List]): The list of arguments that are passed to the function
+                *write_fct* to write the data.
+            scope (Optional[Scope^]): The scope of the Generic data node configuration.<br/>
+                The default value is `Scope.SCENARIO`.
+            validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
+                considered up-to-date. Once the validity period has passed, the data node is considered stale and
+                relevant tasks will run even if they are skippable (see the
+                [Task configs page](../core/config/task-config.md) for more details).
+                If *validity_period* is set to None, the data node is always up-to-date.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+        Returns:
+            The new Generic data node configuration.
+        """
+    @classmethod
+    def configure_in_memory_data_node(
+        cls,
+        id: str,
+        default_data: Optional[Any] = None,
+        scope: Optional[Scope] = None,
+        validity_period: Optional[timedelta] = None,
+        **properties,
+    ) -> "DataNodeConfig":
+        """Configure a new *in-memory* data node configuration.
+
+        Parameters:
+            id (str): The unique identifier of the new in_memory data node configuration.
+            default_data (Optional[any]): The default data of the data nodes instantiated from
+                this in_memory data node configuration.
+            scope (Optional[Scope^]): The scope of the in_memory data node configuration.<br/>
+                The default value is `Scope.SCENARIO`.
+            validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
+                considered up-to-date. Once the validity period has passed, the data node is considered stale and
+                relevant tasks will run even if they are skippable (see the
+                [Task configs page](../core/config/task-config.md) for more details).
+                If *validity_period* is set to None, the data node is always up-to-date.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The new *in-memory* data node configuration.
+        """
+    @classmethod
+    def configure_pickle_data_node(
+        cls,
+        id: str,
+        default_path: Optional[str] = None,
+        default_data: Optional[Any] = None,
+        scope: Optional[Scope] = None,
+        validity_period: Optional[timedelta] = None,
+        **properties,
+    ) -> "DataNodeConfig":
+        """Configure a new pickle data node configuration.
+
+        Parameters:
+            id (str): The unique identifier of the new pickle data node configuration.
+            default_path (Optional[str]): The path of the pickle file.
+            default_data (Optional[any]): The default data of the data nodes instantiated from
+                this pickle data node configuration.
+            scope (Optional[Scope^]): The scope of the pickle data node configuration.<br/>
+                The default value is `Scope.SCENARIO`.
+            validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
+                considered up-to-date. Once the validity period has passed, the data node is considered stale and
+                relevant tasks will run even if they are skippable (see the
+                [Task configs page](../core/config/task-config.md) for more details).
+                If *validity_period* is set to None, the data node is always up-to-date.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The new pickle data node configuration.
+        """
+    @classmethod
+    def configure_sql_table_data_node(
+        cls,
+        id: str,
+        db_name: str,
+        db_engine: str,
+        table_name: str,
+        db_username: Optional[str] = None,
+        db_password: Optional[str] = None,
+        db_host: Optional[str] = None,
+        db_port: Optional[int] = None,
+        db_driver: Optional[str] = None,
+        sqlite_folder_path: Optional[str] = None,
+        sqlite_file_extension: Optional[str] = None,
+        db_extra_args: Optional[Dict[str, Any]] = None,
+        exposed_type: Optional[str] = None,
+        scope: Optional[Scope] = None,
+        validity_period: Optional[timedelta] = None,
+        **properties,
+    ) -> "DataNodeConfig":
+        """Configure a new SQL table data node configuration.
+
+        Parameters:
+            id (str): The unique identifier of the new SQL data node configuration.
+            db_name (str): The database name, or the name of the SQLite database file.
+            db_engine (str): The database engine. Possible values are *"sqlite"*, *"mssql"*, *"mysql"*,
+                or *"postgresql"*.
+            table_name (str): The name of the SQL table.
+            db_username (Optional[str]): The database username. Required by the *"mssql"*, *"mysql"*, and
+                *"postgresql"* engines.
+            db_password (Optional[str]): The database password. Required by the *"mssql"*, *"mysql"*, and
+                *"postgresql"* engines.
+            db_host (Optional[str]): The database host.<br/>
+                The default value is "localhost".
+            db_port (Optional[int]): The database port.<br/>
+                The default value is 1433.
+            db_driver (Optional[str]): The database driver.
+            sqlite_folder_path (Optional[str]): The path to the folder that contains SQLite file.<br/>
+                The default value is the current working folder.
+            sqlite_file_extension (Optional[str]): The file extension of the SQLite file.<br/>
+                The default value is ".db".
+            db_extra_args (Optional[dict[str, any]]): A dictionary of additional arguments to be passed
+                into database connection string.
+            exposed_type (Optional[str]): The exposed type of the data read from SQL table.<br/>
+                The default value is "pandas".
+            scope (Optional[Scope^]): The scope of the SQL data node configuration.<br/>
+                The default value is `Scope.SCENARIO`.
+            validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
+                considered up-to-date. Once the validity period has passed, the data node is considered stale and
+                relevant tasks will run even if they are skippable (see the
+                [Task configs page](../core/config/task-config.md) for more details).
+                If *validity_period* is set to None, the data node is always up-to-date.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The new SQL data node configuration.
+        """
+    @classmethod
+    def configure_sql_data_node(
+        cls,
+        id: str,
+        db_name: str,
+        db_engine: str,
+        read_query: str,
+        write_query_builder: Callable,
+        db_username: Optional[str] = None,
+        db_password: Optional[str] = None,
+        db_host: Optional[str] = None,
+        db_port: Optional[int] = None,
+        db_driver: Optional[str] = None,
+        sqlite_folder_path: Optional[str] = None,
+        sqlite_file_extension: Optional[str] = None,
+        db_extra_args: Optional[Dict[str, Any]] = None,
+        exposed_type: Optional[str] = None,
+        scope: Optional[Scope] = None,
+        validity_period: Optional[timedelta] = None,
+        **properties,
+    ) -> "DataNodeConfig":
+        """Configure a new SQL data node configuration.
+
+        Parameters:
+            id (str): The unique identifier of the new SQL data node configuration.
+            db_name (str): The database name, or the name of the SQLite database file.
+            db_engine (str): The database engine. Possible values are *"sqlite"*, *"mssql"*, *"mysql"*,
+                or *"postgresql"*.
+            read_query (str): The SQL query string used to read the data from the database.
+            write_query_builder (Callable): A callback function that takes the data as an input parameter
+                and returns a list of SQL queries.
+            db_username (Optional[str]): The database username. Required by the *"mssql"*, *"mysql"*, and
+                *"postgresql"* engines.
+            db_password (Optional[str]): The database password. Required by the *"mssql"*, *"mysql"*, and
+                *"postgresql"* engines.
+            db_host (Optional[str]): The database host.<br/>
+                The default value is "localhost".
+            db_port (Optional[int]): The database port.<br/>
+                The default value is 1433.
+            db_driver (Optional[str]): The database driver.
+            sqlite_folder_path (Optional[str]): The path to the folder that contains SQLite file.<br/>
+                The default value is the current working folder.
+            sqlite_file_extension (Optional[str]): The file extension of the SQLite file.<br/>
+                The default value is ".db".
+            db_extra_args (Optional[dict[str, any]]): A dictionary of additional arguments to be passed
+                into database connection string.
+            exposed_type (Optional[str]): The exposed type of the data read from SQL query.<br/>
+                The default value is "pandas".
+            scope (Optional[Scope^]): The scope of the SQL data node configuration.<br/>
+                The default value is `Scope.SCENARIO`.
+            validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
+                considered up-to-date. Once the validity period has passed, the data node is considered stale and
+                relevant tasks will run even if they are skippable (see the
+                [Task configs page](../core/config/task-config.md) for more details).
+                If *validity_period* is set to None, the data node is always up-to-date.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+        Returns:
+            The new SQL data node configuration.
+        """
+    @classmethod
+    def configure_mongo_collection_data_node(
+        cls,
+        id: str,
+        db_name: str,
+        collection_name: str,
+        custom_document: Optional[Any] = None,
+        db_username: Optional[str] = None,
+        db_password: Optional[str] = None,
+        db_host: Optional[str] = None,
+        db_port: Optional[int] = None,
+        db_driver: Optional[str] = None,
+        db_extra_args: Optional[Dict[str, Any]] = None,
+        scope: Optional[Scope] = None,
+        validity_period: Optional[timedelta] = None,
+        **properties,
+    ) -> "DataNodeConfig":
+        """Configure a new Mongo collection data node configuration.
+
+        Parameters:
+            id (str): The unique identifier of the new Mongo collection data node configuration.
+            db_name (str): The database name.
+            collection_name (str): The collection in the database to read from and to write the data to.
+            custom_document (Optional[any]): The custom document class to store, encode, and decode data
+                when reading and writing to a Mongo collection. The custom_document can have an optional
+                *decode()* method to decode data in the Mongo collection to a custom object, and an
+                optional *encode()* method to encode the object's properties to the Mongo collection
+                when writing.
+            db_username (Optional[str]): The database username.
+            db_password (Optional[str]): The database password.
+            db_host (Optional[str]): The database host.<br/>
+                The default value is "localhost".
+            db_port (Optional[int]): The database port.<br/>
+                The default value is 27017.
+            db_driver (Optional[str]): The database driver.
+            db_extra_args (Optional[dict[str, any]]): A dictionary of additional arguments to be passed
+                into database connection string.
+            scope (Optional[Scope^]): The scope of the Mongo collection data node configuration.<br/>
+                The default value is `Scope.SCENARIO`.
+            validity_period (Optional[timedelta]): The duration since the last edit date for which the data node can be
+                considered up-to-date. Once the validity period has passed, the data node is considered stale and
+                relevant tasks will run even if they are skippable (see the
+                [Task configs page](../core/config/task-config.md) for more details).
+                If *validity_period* is set to None, the data node is always up-to-date.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The new Mongo collection data node configuration.
+        """
+    @staticmethod
+    def configure_task(
+        id: str,
+        function,
+        input: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
+        output: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
+        skippable: Optional[bool] = False,
+        **properties,
+    ) -> "TaskConfig":
+        """Configure a new task configuration.
+
+        Parameters:
+            id (str): The unique identifier of this task configuration.
+            function (Callable): The python function called by Taipy to run the task.
+            input (Optional[Union[DataNodeConfig^, List[DataNodeConfig^]]]): The list of the
+                function input data node configurations. This can be a unique data node
+                configuration if there is a single input data node, or None if there are none.
+            output (Optional[Union[DataNodeConfig^, List[DataNodeConfig^]]]): The list of the
+                function output data node configurations. This can be a unique data node
+                configuration if there is a single output data node, or None if there are none.
+            skippable (bool): If True, indicates that the task can be skipped if no change has
+                been made on inputs.<br/>
+                The default value is False.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The new task configuration.
+        """
+    @staticmethod
+    def set_default_task_configuration(
+        function,
+        input: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
+        output: Optional[Union[DataNodeConfig, List[DataNodeConfig]]] = None,
+        skippable: Optional[bool] = False,
+        **properties,
+    ) -> "TaskConfig":
+        """Set the default values for task configurations.
+
+        This function creates the *default task configuration* object,
+        where all task configuration objects will find their default
+        values when needed.
+
+        Parameters:
+            function (Callable): The python function called by Taipy to run the task.
+            input (Optional[Union[DataNodeConfig^, List[DataNodeConfig^]]]): The list of the
+                input data node configurations. This can be a unique data node
+                configuration if there is a single input data node, or None if there are none.
+            output (Optional[Union[DataNodeConfig^, List[DataNodeConfig^]]]): The list of the
+                output data node configurations. This can be a unique data node
+                configuration if there is a single output data node, or None if there are none.
+            skippable (bool): If True, indicates that the task can be skipped if no change has
+                been made on inputs.<br/>
+                The default value is False.
+            **properties (dict[str, any]): A keyworded variable length list of additional
+                arguments.
+        Returns:
+            The default task configuration.
+        """
+    @staticmethod
+    def configure_job_executions(
+        mode: Optional[str] = None, max_nb_of_workers: Optional[Union[int, str]] = None, **properties
+    ) -> "JobConfig":
+        """Configure job execution.
+
+        Parameters:
+            mode (Optional[str]): The job execution mode.
+                Possible values are: *"standalone"* (the default value) or *"development"*.
+            max_nb_of_workers (Optional[Union[int, str]]): Parameter used only in the default *"standalone"* mode.
+                This indicates the maximum number of jobs able to run in parallel.<br/>
+                The default value is 1.<br/>
+                A string can be provided to dynamically set the value using an environment
+                variable. The string must follow the pattern: `ENV[&lt;env_var&gt;]` where
+                `&lt;env_var&gt;` is the name of an environment variable.
+            **properties (dict[str, any]): A keyworded variable length list of additional arguments.
+
+        Returns:
+            The new job execution configuration.
+        """
+    @staticmethod
+    def add_migration_function(
+        target_version: str,
+        config: Union[Section, str],
+        migration_fct: Callable,
+        **properties,
+    ):
+        """Add a migration function for a Configuration to migrate entities to the target version.
+
+        Parameters:
+            target_version (str): The production version that entities are migrated to.
+            config (Union[Section, str]): The configuration, or the `id` of the configuration, that needs to be migrated.
+            migration_fct (Callable): Migration function that takes an entity as input and returns a new entity
+                that is compatible with the target production version.
+            **properties (Dict[str, Any]): A keyworded variable length list of additional arguments.
+        Returns:
+            `MigrationConfig^`: The Migration configuration.
+        """
+    @staticmethod
+    def configure_core(
+        root_folder: Optional[str] = None,
+        storage_folder: Optional[str] = None,
+        repository_type: Optional[str] = None,
+        repository_properties: Optional[Dict[str, Union[str, int]]] = None,
+        read_entity_retry: Optional[int] = None,
+        mode: Optional[str] = None,
+        version_number: Optional[str] = None,
+        force: Optional[bool] = None,
+        **properties,
+    ) -> "CoreSection":
+        """Configure the Core service.
+
+        Parameters:
+            root_folder (Optional[str]): Path of the base folder for the Taipy application.
+                The default value is "./taipy/".
+            storage_folder (Optional[str]): Folder name used to store Taipy data. The default value is ".data/".
+                It is used in conjunction with the `root_folder` field, so the storage path is
+                <root_folder><storage_folder> (the default path is "./taipy/.data/").
+            repository_type (Optional[str]): The type of the repository to be used to store Taipy data.
+                The default value is "filesystem".
+            repository_properties (Optional[Dict[str, Union[str, int]]]): A dictionary of additional properties
+                to be used by the repository.
+            read_entity_retry (Optional[int]): Number of retries to read an entity from the repository
+                before returning a failure. The default value is 3.
+            mode (Optional[str]): Indicates the mode of the version management system.
+                Possible values are *"development"*, *"experiment"*, or *"production"*.
+            version_number (Optional[str]): The string identifier of the version.
+                In development mode, the version number is ignored.
+            force (Optional[bool]): If True, Taipy overrides the version even if the configuration
+                has changed, and runs the application.
+            **properties (Dict[str, Any]): A keyworded variable length list of additional arguments that configure
+                the behavior of the `Core^` service.
+        Returns:
+            The Core configuration.
+        """

+ 5 - 0
src/taipy/config/contributors.txt

@@ -0,0 +1,5 @@
+jrobinAV
+joaoandre-avaiga
+trgiangdo
+tsuu2092
+Dr-Irv

+ 10 - 0
src/taipy/config/exceptions/__init__.py

@@ -0,0 +1,10 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from .exceptions import *

+ 30 - 0
src/taipy/config/exceptions/exceptions.py

@@ -0,0 +1,30 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+
+class LoadingError(Exception):
+    """Raised if an error occurs while loading the configuration file."""
+
+
+class InconsistentEnvVariableError(Exception):
+    """Inconsistency value has been detected in an environment variable referenced by the configuration."""
+
+
+class MissingEnvVariableError(Exception):
+    """Environment variable referenced in configuration is missing."""
+
+
+class InvalidConfigurationId(Exception):
+    """Configuration id is not valid."""
+
+
+class ConfigurationUpdateBlocked(Exception):
+    """The configuration is being blocked from update by other Taipy services."""

+ 0 - 0
src/taipy/config/global_app/__init__.py


+ 62 - 0
src/taipy/config/global_app/global_app_config.py

@@ -0,0 +1,62 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from __future__ import annotations
+
+from typing import Any, Dict, Optional, Union
+
+from ..common._config_blocker import _ConfigBlocker
+from ..common._template_handler import _TemplateHandler as _tpl
+
+
+class GlobalAppConfig:
+    """
+    Configuration fields related to the global application.
+
+    Attributes:
+        **properties (Dict[str, Any]): A dictionary of additional properties.
+    """
+
+    def __init__(self, **properties):
+        self._properties = properties
+
+    @property
+    def properties(self):
+        return {k: _tpl._replace_templates(v) for k, v in self._properties.items()}
+
+    @properties.setter  # type: ignore
+    @_ConfigBlocker._check()
+    def properties(self, val):
+        self._properties = val
+
+    def __getattr__(self, item: str) -> Optional[Any]:
+        return _tpl._replace_templates(self._properties.get(item))
+
+    @classmethod
+    def default_config(cls) -> GlobalAppConfig:
+        return GlobalAppConfig()
+
+    def _clean(self):
+        self._properties.clear()
+
+    def _to_dict(self):
+        as_dict = {}
+        as_dict.update(self._properties)
+        return as_dict
+
+    @classmethod
+    def _from_dict(cls, config_as_dict: Dict[str, Any]):
+        config = GlobalAppConfig()
+        config._properties = config_as_dict
+        return config
+
+    def _update(self, config_as_dict):
+        self._properties.update(config_as_dict)
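
For context, `GlobalAppConfig` turns any keyword property into an attribute and resolves `ENV[...]` templates when a property is read. A small hedged sketch follows; the property names and the `APP_NAME` environment variable are made up.

```python
# Sketch only: the property names and APP_NAME variable are hypothetical.
import os

from taipy.config.global_app.global_app_config import GlobalAppConfig

os.environ["APP_NAME"] = "demo"

cfg = GlobalAppConfig(app_name="ENV[APP_NAME]", retries=3)
assert cfg.app_name == "demo"   # template resolved through _TemplateHandler
assert cfg.retries == 3         # non-template values pass through unchanged
assert cfg.missing is None      # unknown properties resolve to None
```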

+ 72 - 0
src/taipy/config/section.py

@@ -0,0 +1,72 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from abc import abstractmethod
+from typing import Any, Dict, Optional
+
+from .common._config_blocker import _ConfigBlocker
+from .common._template_handler import _TemplateHandler as _tpl
+from .common._validate_id import _validate_id
+
+
+class Section:
+    """A Section as a consistent part of the Config.
+
+    A section is defined by the section name (representing the type of objects that are configured) and a section id.
+    """
+
+    _DEFAULT_KEY = "default"
+    _ID_KEY = "id"
+
+    def __init__(self, id, **properties):
+        self.id = _validate_id(id)
+        self._properties = properties or dict()
+
+    @abstractmethod
+    def __copy__(self):
+        raise NotImplementedError
+
+    @property
+    @abstractmethod
+    def name(self):
+        raise NotImplementedError
+
+    @abstractmethod
+    def _clean(self):
+        raise NotImplementedError
+
+    @abstractmethod
+    def _to_dict(self):
+        raise NotImplementedError
+
+    @classmethod
+    @abstractmethod
+    def _from_dict(cls, config_as_dict: Dict[str, Any], id, config):
+        raise NotImplementedError
+
+    @abstractmethod
+    def _update(self, config_as_dict, default_section=None):
+        raise NotImplementedError
+
+    def __getattr__(self, item: str) -> Optional[Any]:
+        return self._replace_templates(self._properties.get(item, None))
+
+    @property
+    def properties(self):
+        return {k: _tpl._replace_templates(v) for k, v in self._properties.items()}
+
+    @properties.setter  # type: ignore
+    @_ConfigBlocker._check()
+    def properties(self, val):
+        self._properties = val
+
+    def _replace_templates(self, value):
+        return _tpl._replace_templates(value)
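
A concrete section has to supply the abstract methods above. The following is a minimal, hypothetical subclass (the `BucketSection` name and its `path` attribute are illustration only), in the spirit of the `SectionForTest` helper used by the test suite later in this diff.

```python
# Hypothetical Section subclass; mirrors the shape of the test helpers below.
import copy
from typing import Any, Dict, Optional

from taipy.config.section import Section


class BucketSection(Section):
    name = "buckets"  # section name as it appears in TOML/JSON files

    def __init__(self, id: str, path: Optional[str] = None, **properties):
        self._path = path
        super().__init__(id, **properties)

    def __copy__(self):
        return BucketSection(self.id, self._path, **copy.deepcopy(self._properties))

    def _clean(self):
        self._path = None
        self._properties.clear()

    def _to_dict(self) -> Dict[str, Any]:
        return {"path": self._path, **self._properties}

    @classmethod
    def _from_dict(cls, config_as_dict: Dict[str, Any], id, config):
        return cls(id, config_as_dict.pop("path", None), **config_as_dict)

    def _update(self, config_as_dict, default_section=None):
        self._path = config_as_dict.pop("path", self._path)
        self._properties.update(config_as_dict)
```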

+ 62 - 0
src/taipy/config/setup.py

@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+"""The setup script."""
+import json
+import os
+
+from setuptools import find_namespace_packages, find_packages, setup
+
+with open("README.md") as readme_file:
+    readme = readme_file.read()
+
+with open(f"src{os.sep}taipy{os.sep}config{os.sep}version.json") as version_file:
+    version = json.load(version_file)
+    version_string = f'{version.get("major", 0)}.{version.get("minor", 0)}.{version.get("patch", 0)}'
+    if vext := version.get("ext"):
+        version_string = f"{version_string}.{vext}"
+
+requirements = ["toml>=0.10,<0.11", "deepdiff>=6.2,<6.3"]
+
+test_requirements = ["pytest>=3.8"]
+
+setup(
+    author="Avaiga",
+    author_email="dev@taipy.io",
+    python_requires=">=3.8",
+    classifiers=[
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: Apache Software License",
+        "Natural Language :: English",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
+    ],
+    description="A Taipy package dedicated to easily configure a Taipy application.",
+    install_requires=requirements,
+    long_description=readme,
+    long_description_content_type="text/markdown",
+    include_package_data=True,
+    license="Apache License 2.0",
+    keywords="taipy-config",
+    name="taipy-config",
+    package_dir={"": "src"},
+    packages=find_namespace_packages(where="src")
+    + find_packages(include=["taipy", "taipy.config", "taipy.config.*", "taipy.logger", "taipy.logger.*"]),
+    test_suite="tests",
+    tests_require=test_requirements,
+    url="https://github.com/avaiga/taipy-config",
+    version=version_string,
+    zip_safe=False,
+)

+ 165 - 0
src/taipy/config/stubs/generate_pyi.py

@@ -0,0 +1,165 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import ast
+import re
+from pathlib import Path
+from typing import List
+
+
+def _get_function_delimiters(initial_line, lines):
+    begin = end = initial_line
+    while True:
+        if lines[begin - 1] == "\n":
+            break
+        begin -= 1
+
+    if lines[end].endswith("(\n"):
+        while ":\n" not in lines[end]:
+            end += 1
+
+    if '"""' in lines[end + 1]:
+        while True:
+            if '"""\n' in lines[end]:
+                break
+            end += 1
+    return begin, end + 1
+
+
+def _get_file_lines(filename: str) -> List[str]:
+    # Get file lines for later
+    with open(filename) as f:
+        return f.readlines()
+
+
+def _get_file_ast(filename: str):
+    # Get raw text and build ast
+    _config = Path(filename)
+    _tree = _config.read_text()
+    return ast.parse(_tree)
+
+
+def _build_base_config_pyi(filename, base_pyi):
+    lines = _get_file_lines(filename)
+    tree = _get_file_ast(filename)
+
+    class_lineno = [f.lineno for f in ast.walk(tree) if isinstance(f, ast.ClassDef) and f.name == "Config"]
+    begin_class, end_class = _get_function_delimiters(class_lineno[0] - 1, lines)
+
+    base_pyi += "".join(lines[begin_class:end_class])
+    functions = [f.lineno for f in ast.walk(tree) if isinstance(f, ast.FunctionDef) and not f.name.startswith("__")]
+
+    for ln in functions:
+        begin_line, end_line = _get_function_delimiters(ln - 1, lines)
+        base_pyi += "".join(lines[begin_line:end_line])
+
+        base_pyi = __add_docstring(base_pyi, lines, end_line)
+        base_pyi += "\n"
+
+    return base_pyi
+
+
+def __add_docstring(base_pyi, lines, end_line):
+    if '"""' not in lines[end_line - 1]:
+        base_pyi += '\t\t""""""\n'.replace("\t", "    ")
+    return base_pyi
+
+
+def _build_entity_config_pyi(base_pyi, filename, entity_map):
+    lines = _get_file_lines(filename)
+    tree = _get_file_ast(filename)
+    functions = {}
+
+    for f in ast.walk(tree):
+        if isinstance(f, ast.FunctionDef):
+            if "_configure" in f.name and not f.name.startswith("__"):
+                functions[f.name] = f.lineno
+            elif "_set_default" in f.name and not f.name.startswith("__"):
+                functions[f.name] = f.lineno
+            elif "_add" in f.name and not f.name.startswith("__"):
+                functions[f.name] = f.lineno
+
+    for k, v in functions.items():
+        begin_line, end_line = _get_function_delimiters(v - 1, lines)
+        try:
+            func = "".join(lines[begin_line:end_line])
+            func = func if not k.startswith("_") else func.replace(k, entity_map.get(k))
+            func = __add_docstring(func, lines, end_line) + "\n"
+            base_pyi += func
+        except Exception:
+            print(f"key={k}")
+            raise
+
+    return base_pyi
+
+
+def _generate_entity_and_property_maps(filename):
+    entities_map = {}
+    property_map = {}
+    entity_tree = _get_file_ast(filename)
+    functions = [
+        f for f in ast.walk(entity_tree) if isinstance(f, ast.Call) and getattr(f.func, "id", "") == "_inject_section"
+    ]
+
+    for f in functions:
+        entity = ast.unparse(f.args[0])
+        entities_map[entity] = {}
+        property_map[eval(ast.unparse(f.args[1]))] = entity
+        # Remove class name from function map
+        text = ast.unparse(f.args[-1]).replace(f"{entity}.", "")
+        matches = re.findall(r"\((.*?)\)", text)
+
+        for m in matches:
+            v, k = m.replace("'", "").split(",")
+            entities_map[entity][k.strip()] = v
+    return entities_map, property_map
+
+
+def _generate_acessors(base_pyi, property_map):
+    for property, cls in property_map.items():
+        return_template = f"Dict[str, {cls}]" if property != "job_config" else f"{cls}"
+        template = ("\t@_Classproperty\n" + f'\tdef {property}(cls) -> {return_template}:\n\t\t""""""\n').replace(
+            "\t", "    "
+        )
+        base_pyi += template + "\n"
+    return base_pyi
+
+
+def _build_header(filename):
+    _file = Path(filename)
+    return _file.read_text() + "\n\n"
+
+
+if __name__ == "__main__":
+    header_file = "stubs/pyi_header.py"
+    config_init = Path("taipy-core/src/taipy/core/config/__init__.py")
+    base_config = "src/taipy/config/config.py"
+
+    dn_filename = "taipy-core/src/taipy/core/config/data_node_config.py"
+    job_filename = "taipy-core/src/taipy/core/config/job_config.py"
+    scenario_filename = "taipy-core/src/taipy/core/config/scenario_config.py"
+    task_filename = "taipy-core/src/taipy/core/config/task_config.py"
+    migration_filename = "taipy-core/src/taipy/core/config/migration_config.py"
+    core_filename = "taipy-core/src/taipy/core/config/core_section.py"
+
+    entities_map, property_map = _generate_entity_and_property_maps(config_init)
+    pyi = _build_header(header_file)
+    pyi = _build_base_config_pyi(base_config, pyi)
+    pyi = _generate_acessors(pyi, property_map)
+    pyi = _build_entity_config_pyi(pyi, scenario_filename, entities_map["ScenarioConfig"])
+    pyi = _build_entity_config_pyi(pyi, dn_filename, entities_map["DataNodeConfig"])
+    pyi = _build_entity_config_pyi(pyi, task_filename, entities_map["TaskConfig"])
+    pyi = _build_entity_config_pyi(pyi, job_filename, entities_map["JobConfig"])
+    pyi = _build_entity_config_pyi(pyi, migration_filename, entities_map["MigrationConfig"])
+    pyi = _build_entity_config_pyi(pyi, core_filename, entities_map["CoreSection"])
+
+    with open("src/taipy/config/config.pyi", "w") as f:
+        f.writelines(pyi)

+ 25 - 0
src/taipy/config/stubs/pyi_header.py

@@ -0,0 +1,25 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import json
+from datetime import timedelta
+from typing import Any, Callable, Dict, List, Optional, Union
+
+from taipy.core.config import CoreSection, DataNodeConfig, JobConfig, MigrationConfig, ScenarioConfig, TaskConfig
+
+from .checker.issue_collector import IssueCollector
+from .common._classproperty import _Classproperty
+from .common._config_blocker import _ConfigBlocker
+from .common.frequency import Frequency
+from .common.scope import Scope
+from .global_app.global_app_config import GlobalAppConfig
+from .section import Section
+from .unique_section import UniqueSection

+ 10 - 0
src/taipy/config/tests/__init__.py

@@ -0,0 +1,10 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.

+ 10 - 0
src/taipy/config/tests/config/__init__.py

@@ -0,0 +1,10 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.

+ 10 - 0
src/taipy/config/tests/config/checker/__init__.py

@@ -0,0 +1,10 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.

+ 10 - 0
src/taipy/config/tests/config/checker/checkers/__init__.py

@@ -0,0 +1,10 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.

+ 27 - 0
src/taipy/config/tests/config/checker/checkers/test_checker.py

@@ -0,0 +1,27 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+from unittest import mock
+from unittest.mock import MagicMock
+
+from src.taipy.config import Config
+from src.taipy.config.checker._checker import _Checker
+from src.taipy.config.checker.issue_collector import IssueCollector
+from tests.config.utils.checker_for_tests import CheckerForTest
+
+
+def test_register_checker():
+    checker = CheckerForTest
+    checker._check = MagicMock()
+    _Checker.add_checker(checker)
+    Config.check()
+    checker._check.assert_called_once()

+ 81 - 0
src/taipy/config/tests/config/checker/checkers/test_config_checker.py

@@ -0,0 +1,81 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import logging
+from unittest import mock
+
+from src.taipy.config._config import _Config
+from src.taipy.config.checker._checkers._config_checker import _ConfigChecker
+from src.taipy.config.checker.issue import Issue
+from src.taipy.config.checker.issue_collector import IssueCollector
+
+
+class MyCustomChecker(_ConfigChecker):
+    def _check(self) -> IssueCollector:
+        pass
+
+
+def test__error():
+    with mock.patch.object(logging.Logger, "error"):
+        collector = IssueCollector()
+        assert len(collector.all) == 0
+        _ConfigChecker(_Config(), collector)._error("field", 17, "my message")
+        assert len(collector.all) == 1
+        assert len(collector.errors) == 1
+        assert len(collector.warnings) == 0
+        assert len(collector.infos) == 0
+        assert collector.errors[0] == Issue(IssueCollector._ERROR_LEVEL, "field", 17, "my message", "_ConfigChecker")
+
+        MyCustomChecker(_Config(), collector)._error("foo", "bar", "baz")
+        assert len(collector.all) == 2
+        assert len(collector.errors) == 2
+        assert len(collector.warnings) == 0
+        assert len(collector.infos) == 0
+        assert collector.errors[0] == Issue(IssueCollector._ERROR_LEVEL, "field", 17, "my message", "_ConfigChecker")
+        assert collector.errors[1] == Issue(IssueCollector._ERROR_LEVEL, "foo", "bar", "baz", "MyCustomChecker")
+
+
+def test__warning():
+    collector = IssueCollector()
+    assert len(collector.all) == 0
+    _ConfigChecker(_Config(), collector)._warning("field", 17, "my message")
+    assert len(collector.all) == 1
+    assert len(collector.warnings) == 1
+    assert len(collector.errors) == 0
+    assert len(collector.infos) == 0
+    assert collector.warnings[0] == Issue(IssueCollector._WARNING_LEVEL, "field", 17, "my message", "_ConfigChecker")
+
+    MyCustomChecker(_Config(), collector)._warning("foo", "bar", "baz")
+    assert len(collector.all) == 2
+    assert len(collector.warnings) == 2
+    assert len(collector.errors) == 0
+    assert len(collector.infos) == 0
+    assert collector.warnings[0] == Issue(IssueCollector._WARNING_LEVEL, "field", 17, "my message", "_ConfigChecker")
+    assert collector.warnings[1] == Issue(IssueCollector._WARNING_LEVEL, "foo", "bar", "baz", "MyCustomChecker")
+
+
+def test__info():
+    collector = IssueCollector()
+    assert len(collector.all) == 0
+    _ConfigChecker(_Config(), collector)._info("field", 17, "my message")
+    assert len(collector.all) == 1
+    assert len(collector.infos) == 1
+    assert len(collector.errors) == 0
+    assert len(collector.warnings) == 0
+    assert collector.infos[0] == Issue(IssueCollector._INFO_LEVEL, "field", 17, "my message", "_ConfigChecker")
+
+    MyCustomChecker(_Config(), collector)._info("foo", "bar", "baz")
+    assert len(collector.all) == 2
+    assert len(collector.infos) == 2
+    assert len(collector.errors) == 0
+    assert len(collector.warnings) == 0
+    assert collector.infos[0] == Issue(IssueCollector._INFO_LEVEL, "field", 17, "my message", "_ConfigChecker")
+    assert collector.infos[1] == Issue(IssueCollector._INFO_LEVEL, "foo", "bar", "baz", "MyCustomChecker")

+ 22 - 0
src/taipy/config/tests/config/checker/test_default_config_checker.py

@@ -0,0 +1,22 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from src.taipy.config._config import _Config
+from src.taipy.config.checker._checker import _Checker
+
+
+class TestDefaultConfigChecker:
+    def test_check_default_config(self):
+        config = _Config._default_config()
+        collector = _Checker._check(config)
+        assert len(collector._errors) == 0
+        assert len(collector._infos) == 0
+        assert len(collector._warnings) == 0

+ 94 - 0
src/taipy/config/tests/config/checker/test_issue_collector.py

@@ -0,0 +1,94 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from src.taipy.config.checker.issue import Issue
+from src.taipy.config.checker.issue_collector import IssueCollector
+
+
+class TestIssueCollector:
+    def test_add_error(self):
+        collector = IssueCollector()
+        assert len(collector.errors) == 0
+        assert len(collector.warnings) == 0
+        assert len(collector.infos) == 0
+        assert len(collector.all) == 0
+        collector._add_error("field", "value", "message", "checker")
+        assert len(collector.errors) == 1
+        assert len(collector.warnings) == 0
+        assert len(collector.infos) == 0
+        assert len(collector.all) == 1
+        assert collector.all[0] == Issue(IssueCollector._ERROR_LEVEL, "field", "value", "message", "checker")
+        collector._add_error("field", "value", "message", "checker")
+        assert len(collector.errors) == 2
+        assert len(collector.warnings) == 0
+        assert len(collector.infos) == 0
+        assert len(collector.all) == 2
+        assert collector.all[0] == Issue(IssueCollector._ERROR_LEVEL, "field", "value", "message", "checker")
+        assert collector.all[1] == Issue(IssueCollector._ERROR_LEVEL, "field", "value", "message", "checker")
+
+    def test_add_warning(self):
+        collector = IssueCollector()
+        assert len(collector.errors) == 0
+        assert len(collector.warnings) == 0
+        assert len(collector.infos) == 0
+        assert len(collector.all) == 0
+        collector._add_warning("field", "value", "message", "checker")
+        assert len(collector.errors) == 0
+        assert len(collector.warnings) == 1
+        assert len(collector.infos) == 0
+        assert len(collector.all) == 1
+        assert collector.all[0] == Issue(IssueCollector._WARNING_LEVEL, "field", "value", "message", "checker")
+        collector._add_warning("field", "value", "message", "checker")
+        assert len(collector.errors) == 0
+        assert len(collector.warnings) == 2
+        assert len(collector.infos) == 0
+        assert len(collector.all) == 2
+        assert collector.all[0] == Issue(IssueCollector._WARNING_LEVEL, "field", "value", "message", "checker")
+        assert collector.all[1] == Issue(IssueCollector._WARNING_LEVEL, "field", "value", "message", "checker")
+
+    def test_add_info(self):
+        collector = IssueCollector()
+        assert len(collector.errors) == 0
+        assert len(collector.warnings) == 0
+        assert len(collector.infos) == 0
+        assert len(collector.all) == 0
+        collector._add_info("field", "value", "message", "checker")
+        assert len(collector.errors) == 0
+        assert len(collector.warnings) == 0
+        assert len(collector.infos) == 1
+        assert len(collector.all) == 1
+        assert collector.all[0] == Issue(IssueCollector._INFO_LEVEL, "field", "value", "message", "checker")
+        collector._add_info("field", "value", "message", "checker")
+        assert len(collector.errors) == 0
+        assert len(collector.warnings) == 0
+        assert len(collector.infos) == 2
+        assert len(collector.all) == 2
+        assert collector.all[0] == Issue(IssueCollector._INFO_LEVEL, "field", "value", "message", "checker")
+        assert collector.all[1] == Issue(IssueCollector._INFO_LEVEL, "field", "value", "message", "checker")
+
+    def test_all(self):
+        collector = IssueCollector()
+        collector._add_info("foo", "bar", "baz", "qux")
+        assert collector.all[0] == Issue(IssueCollector._INFO_LEVEL, "foo", "bar", "baz", "qux")
+        collector._add_warning("foo2", "bar2", "baz2", "qux2")
+        assert collector.all[0] == Issue(IssueCollector._WARNING_LEVEL, "foo2", "bar2", "baz2", "qux2")
+        assert collector.all[1] == Issue(IssueCollector._INFO_LEVEL, "foo", "bar", "baz", "qux")
+        collector._add_warning("foo3", "bar3", "baz3", "qux3")
+        assert collector.all[0] == Issue(IssueCollector._WARNING_LEVEL, "foo2", "bar2", "baz2", "qux2")
+        assert collector.all[1] == Issue(IssueCollector._WARNING_LEVEL, "foo3", "bar3", "baz3", "qux3")
+        assert collector.all[2] == Issue(IssueCollector._INFO_LEVEL, "foo", "bar", "baz", "qux")
+        collector._add_info("field", "value", "message", "checker")
+        collector._add_error("field", "value", "message", "checker")
+        assert collector.all[0] == Issue(IssueCollector._ERROR_LEVEL, "field", "value", "message", "checker")
+        assert collector.all[1] == Issue(IssueCollector._WARNING_LEVEL, "foo2", "bar2", "baz2", "qux2")
+        assert collector.all[2] == Issue(IssueCollector._WARNING_LEVEL, "foo3", "bar3", "baz3", "qux3")
+        assert collector.all[3] == Issue(IssueCollector._INFO_LEVEL, "foo", "bar", "baz", "qux")
+        assert collector.all[4] == Issue(IssueCollector._INFO_LEVEL, "field", "value", "message", "checker")

+ 10 - 0
src/taipy/config/tests/config/common/__init__.py

@@ -0,0 +1,10 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.

+ 138 - 0
src/taipy/config/tests/config/common/test_argparser.py

@@ -0,0 +1,138 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import argparse
+import re
+import sys
+
+import pytest
+
+from src.taipy._cli._base_cli import _CLI
+
+if sys.version_info >= (3, 10):
+    argparse_options_str = "options:"
+else:
+    argparse_options_str = "optional arguments:"
+
+
+def preprocess_stdout(stdout):
+    stdout = stdout.replace("\n", " ").replace("\t", " ")
+    return re.sub(" +", " ", stdout)
+
+
+def remove_subparser(name: str):
+    """Remove a subparser from argparse."""
+    _CLI._sub_taipyparsers.pop(name, None)
+
+    if _CLI._subparser_action:
+        _CLI._subparser_action._name_parser_map.pop(name, None)
+
+        for action in _CLI._subparser_action._choices_actions:
+            if action.dest == name:
+                _CLI._subparser_action._choices_actions.remove(action)
+
+
+@pytest.fixture(autouse=True, scope="function")
+def clean_argparser():
+    _CLI._parser = argparse.ArgumentParser(conflict_handler="resolve")
+    _CLI._arg_groups = {}
+    subcommands = list(_CLI._sub_taipyparsers.keys())
+    for subcommand in subcommands:
+        remove_subparser(subcommand)
+
+    yield
+
+
+def test_subparser(capfd):
+    subcommand_1 = _CLI._add_subparser("subcommand_1", help="subcommand_1 help")
+    subcommand_1.add_argument("--foo", "-f", help="foo help")
+    subcommand_1.add_argument("--bar", "-b", help="bar help")
+
+    subcommand_2 = _CLI._add_subparser("subcommand_2", help="subcommand_2 help")
+    subcommand_2.add_argument("--doo", "-d", help="doo help")
+    subcommand_2.add_argument("--baz", "-z", help="baz help")
+
+    expected_subcommand_1_help_message = f"""subcommand_1 [-h] [--foo FOO] [--bar BAR]
+
+{argparse_options_str}
+  -h, --help         show this help message and exit
+  --foo FOO, -f FOO  foo help
+  --bar BAR, -b BAR  bar help
+    """
+
+    subcommand_1.print_help()
+    stdout, _ = capfd.readouterr()
+    assert preprocess_stdout(expected_subcommand_1_help_message) in preprocess_stdout(stdout)
+
+    expected_subcommand_2_help_message = f"""subcommand_2 [-h] [--doo DOO] [--baz BAZ]
+
+{argparse_options_str}
+  -h, --help         show this help message and exit
+  --doo DOO, -d DOO  doo help
+  --baz BAZ, -z BAZ  baz help
+    """
+
+    subcommand_2.print_help()
+    stdout, _ = capfd.readouterr()
+    assert preprocess_stdout(expected_subcommand_2_help_message) in preprocess_stdout(stdout)
+
+
+def test_duplicate_subcommand():
+    subcommand_1 = _CLI._add_subparser("subcommand_1", help="subcommand_1 help")
+    subcommand_1.add_argument("--foo", "-f", help="foo help")
+
+    subcommand_2 = _CLI._add_subparser("subcommand_1", help="subcommand_2 help")
+    subcommand_2.add_argument("--bar", "-b", help="bar help")
+
+    # The title of subcommand_2 duplicates that of subcommand_1, so no new
+    # subcommand is created.
+    assert len(_CLI._sub_taipyparsers) == 1
+
+
+def test_groupparser(capfd):
+    group_1 = _CLI._add_groupparser("group_1", "group_1 desc")
+    group_1.add_argument("--foo", "-f", help="foo help")
+    group_1.add_argument("--bar", "-b", help="bar help")
+
+    group_2 = _CLI._add_groupparser("group_2", "group_2 desc")
+    group_2.add_argument("--doo", "-d", help="doo help")
+    group_2.add_argument("--baz", "-z", help="baz help")
+
+    expected_help_message = """
+group_1:
+  group_1 desc
+
+  --foo FOO, -f FOO  foo help
+  --bar BAR, -b BAR  bar help
+
+group_2:
+  group_2 desc
+
+  --doo DOO, -d DOO  doo help
+  --baz BAZ, -z BAZ  baz help
+    """.strip()
+
+    _CLI._parser.print_help()
+    stdout, _ = capfd.readouterr()
+
+    assert expected_help_message in stdout
+
+
+def test_duplicate_group():
+    group_1 = _CLI._add_groupparser("group_1", "group_1 desc")
+    group_1.add_argument("--foo", "-f", help="foo help")
+
+    group_2 = _CLI._add_groupparser("group_1", "group_2 desc")
+    group_2.add_argument("--bar", "-b", help="bar help")
+
+    # The title of group_2 duplicates that of group_1, so no new
+    # group is created.
+    assert len(_CLI._arg_groups) == 1

+ 27 - 0
src/taipy/config/tests/config/common/test_classproperty.py

@@ -0,0 +1,27 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import pytest
+
+from src.taipy.config.common._classproperty import _Classproperty
+
+
+class TestClassProperty:
+    def test_class_property(self):
+        class TestClass:
+            @_Classproperty
+            def test_property(cls):
+                return "test_property"
+
+        assert TestClass.test_property == "test_property"
+        assert TestClass().test_property == "test_property"
+        with pytest.raises(TypeError):
+            TestClass.test_property()

+ 50 - 0
src/taipy/config/tests/config/common/test_scope.py

@@ -0,0 +1,50 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import pytest
+
+from src.taipy.config.common.scope import Scope
+
+
+def test_scope():
+    # Test __ge__ method
+    assert Scope.GLOBAL >= Scope.GLOBAL
+    assert Scope.GLOBAL >= Scope.CYCLE
+    assert Scope.CYCLE >= Scope.CYCLE
+    assert Scope.GLOBAL >= Scope.SCENARIO
+    assert Scope.CYCLE >= Scope.SCENARIO
+    assert Scope.SCENARIO >= Scope.SCENARIO
+    with pytest.raises(TypeError):
+        assert Scope.SCENARIO >= "testing string"
+
+    # Test __gt__ method
+    assert Scope.GLOBAL > Scope.CYCLE
+    assert Scope.GLOBAL > Scope.SCENARIO
+    assert Scope.CYCLE > Scope.SCENARIO
+    with pytest.raises(TypeError):
+        assert Scope.SCENARIO > "testing string"
+
+    # Test __le__ method
+    assert Scope.GLOBAL <= Scope.GLOBAL
+    assert Scope.CYCLE <= Scope.GLOBAL
+    assert Scope.CYCLE <= Scope.CYCLE
+    assert Scope.SCENARIO <= Scope.GLOBAL
+    assert Scope.SCENARIO <= Scope.CYCLE
+    assert Scope.SCENARIO <= Scope.SCENARIO
+    with pytest.raises(TypeError):
+        assert Scope.SCENARIO <= "testing string"
+
+    # Test __lt__ method
+    assert Scope.SCENARIO < Scope.GLOBAL
+    assert Scope.SCENARIO < Scope.GLOBAL
+    assert Scope.SCENARIO < Scope.CYCLE
+    with pytest.raises(TypeError):
+        assert Scope.SCENARIO < "testing string"

+ 198 - 0
src/taipy/config/tests/config/common/test_template_handler.py

@@ -0,0 +1,198 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import datetime
+import os
+from unittest import mock
+
+import pytest
+from src.taipy.config.common._template_handler import _TemplateHandler
+from src.taipy.config.common.frequency import Frequency
+from src.taipy.config.common.scope import Scope
+from src.taipy.config.exceptions.exceptions import InconsistentEnvVariableError
+
+
+def test_replace_if_template():
+    assert_does_not_change("123")
+    assert_does_not_change("foo")
+    assert_does_not_change("_foo")
+    assert_does_not_change("_foo_")
+    assert_does_not_change("foo_")
+    assert_does_not_change("foo")
+    assert_does_not_change("foo_1")
+    assert_does_not_change("1foo_1")
+    assert_does_not_change("env(foo)")
+    assert_does_not_change("env<foo>")
+    assert_does_not_change("env[foo]")
+    assert_does_not_change("Env[foo]")
+    assert_does_not_change("ENV[1foo]")
+
+    assert_does_not_change("123:bool")
+    assert_does_not_change("foo:bool")
+    assert_does_not_change("_foo:bool")
+    assert_does_not_change("_foo_:bool")
+    assert_does_not_change("foo_:bool")
+    assert_does_not_change("foo:bool")
+    assert_does_not_change("foo_1:bool")
+    assert_does_not_change("1foo_1:bool")
+    assert_does_not_change("env(foo):bool")
+    assert_does_not_change("env<foo>:bool")
+    assert_does_not_change("env[foo]:bool")
+    assert_does_not_change("Env[foo]:bool")
+    assert_does_not_change("ENV[1foo]:bool")
+
+    assert_does_not_change("ENV[foo]:")
+    assert_does_not_change("ENV[_foo]:")
+    assert_does_not_change("ENV[foo_]:")
+    assert_does_not_change("ENV[foo0]:")
+    assert_does_not_change("ENV[foo_0]:")
+    assert_does_not_change("ENV[_foo_0]:")
+
+    assert_does_not_change("ENV[foo]:foo")
+    assert_does_not_change("ENV[_foo]:foo")
+    assert_does_not_change("ENV[foo_]:foo")
+    assert_does_not_change("ENV[foo0]:foo")
+    assert_does_not_change("ENV[foo_0]:foo")
+    assert_does_not_change("ENV[_foo_0]:foo")
+
+    assert_does_replace("ENV[foo]", "foo", "VALUE", str)
+    assert_does_replace("ENV[_foo]", "_foo", "VALUE", str)
+    assert_does_replace("ENV[foo_]", "foo_", "VALUE", str)
+    assert_does_replace("ENV[foo0]", "foo0", "VALUE", str)
+    assert_does_replace("ENV[foo_0]", "foo_0", "VALUE", str)
+    assert_does_replace("ENV[_foo_0]", "_foo_0", "VALUE", str)
+
+    assert_does_replace("ENV[foo]:str", "foo", "VALUE", str)
+    assert_does_replace("ENV[_foo]:str", "_foo", "VALUE", str)
+    assert_does_replace("ENV[foo_]:str", "foo_", "VALUE", str)
+    assert_does_replace("ENV[foo0]:str", "foo0", "VALUE", str)
+    assert_does_replace("ENV[foo_0]:str", "foo_0", "VALUE", str)
+    assert_does_replace("ENV[_foo_0]:str", "_foo_0", "VALUE", str)
+
+    assert_does_replace("ENV[foo]:int", "foo", "1", int)
+    assert_does_replace("ENV[_foo]:int", "_foo", "1", int)
+    assert_does_replace("ENV[foo_]:int", "foo_", "1", int)
+    assert_does_replace("ENV[foo0]:int", "foo0", "1", int)
+    assert_does_replace("ENV[foo_0]:int", "foo_0", "1", int)
+    assert_does_replace("ENV[_foo_0]:int", "_foo_0", "1", int)
+
+    assert_does_replace("ENV[foo]:float", "foo", "1.", float)
+    assert_does_replace("ENV[_foo]:float", "_foo", "1.", float)
+    assert_does_replace("ENV[foo_]:float", "foo_", "1.", float)
+    assert_does_replace("ENV[foo0]:float", "foo0", "1.", float)
+    assert_does_replace("ENV[foo_0]:float", "foo_0", "1.", float)
+    assert_does_replace("ENV[_foo_0]:float", "_foo_0", "1.", float)
+
+    assert_does_replace("ENV[foo]:bool", "foo", "True", bool)
+    assert_does_replace("ENV[_foo]:bool", "_foo", "True", bool)
+    assert_does_replace("ENV[foo_]:bool", "foo_", "True", bool)
+    assert_does_replace("ENV[foo0]:bool", "foo0", "True", bool)
+    assert_does_replace("ENV[foo_0]:bool", "foo_0", "True", bool)
+    assert_does_replace("ENV[_foo_0]:bool", "_foo_0", "True", bool)
+
+
+def assert_does_replace(template, env_variable_name, replaced_by, as_type):
+    with mock.patch.dict(os.environ, {env_variable_name: replaced_by}):
+        tpl = _TemplateHandler()
+        assert tpl._replace_templates(template) == as_type(replaced_by)
+
+
+def assert_does_not_change(template):
+    tpl = _TemplateHandler()
+    assert tpl._replace_templates(template) == template
+
+
+def test_replace_tuple_list_dict():
+    with mock.patch.dict(os.environ, {"FOO": "true", "BAR": "3", "BAZ": "qux"}):
+        tpl = _TemplateHandler()
+        now = datetime.datetime.now()
+        actual = tpl._replace_templates(("ENV[FOO]:bool", now, "ENV[BAR]:int", "ENV[BAZ]", "quz"))
+        assert actual == (True, now, 3, "qux", "quz")
+        actual = tpl._replace_templates(("ENV[FOO]:bool", now, "ENV[BAR]:int", "ENV[BAZ]", "quz"))
+        assert actual == (True, now, 3, "qux", "quz")
+
+
+def test_to_bool():
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_bool("okhds")
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_bool("no")
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_bool("tru")
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_bool("tru_e")
+
+    assert _TemplateHandler._to_bool("true")
+    assert _TemplateHandler._to_bool("True")
+    assert _TemplateHandler._to_bool("TRUE")
+    assert _TemplateHandler._to_bool("TruE")
+    assert _TemplateHandler._to_bool("TrUE")
+
+    assert not _TemplateHandler._to_bool("false")
+    assert not _TemplateHandler._to_bool("False")
+    assert not _TemplateHandler._to_bool("FALSE")
+    assert not _TemplateHandler._to_bool("FalSE")
+    assert not _TemplateHandler._to_bool("FalSe")
+
+
+def test_to_int():
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_int("okhds")
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_int("_45")
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_int("12.5")
+
+    assert 12 == _TemplateHandler._to_int("12")
+    assert 0 == _TemplateHandler._to_int("0")
+    assert -2 == _TemplateHandler._to_int("-2")
+    assert 156165 == _TemplateHandler._to_int("156165")
+
+
+def test_to_float():
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_float("okhds")
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_float("_45")
+
+    assert 12.5 == _TemplateHandler._to_float("12.5")
+    assert 2.0 == _TemplateHandler._to_float("2")
+    assert 0.0 == _TemplateHandler._to_float("0")
+    assert -2.1 == _TemplateHandler._to_float("-2.1")
+    assert 156165.3 == _TemplateHandler._to_float("156165.3")
+
+
+def test_to_scope():
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_scope("okhds")
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_scope("plop")
+
+    assert Scope.GLOBAL == _TemplateHandler._to_scope("global")
+    assert Scope.GLOBAL == _TemplateHandler._to_scope("GLOBAL")
+    assert Scope.SCENARIO == _TemplateHandler._to_scope("SCENARIO")
+    assert Scope.CYCLE == _TemplateHandler._to_scope("cycle")
+
+
+def test_to_frequency():
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_frequency("okhds")
+    with pytest.raises(InconsistentEnvVariableError):
+        _TemplateHandler._to_frequency("plop")
+
+    assert Frequency.DAILY == _TemplateHandler._to_frequency("DAILY")
+    assert Frequency.DAILY == _TemplateHandler._to_frequency("Daily")
+    assert Frequency.WEEKLY == _TemplateHandler._to_frequency("weekly")
+    assert Frequency.WEEKLY == _TemplateHandler._to_frequency("WEEKLY")
+    assert Frequency.MONTHLY == _TemplateHandler._to_frequency("Monthly")
+    assert Frequency.MONTHLY == _TemplateHandler._to_frequency("MONThLY")
+    assert Frequency.QUARTERLY == _TemplateHandler._to_frequency("QuaRtERlY")
+    assert Frequency.YEARLY == _TemplateHandler._to_frequency("Yearly")
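
In practice, the pattern exercised by these tests is `ENV[<var>]` with an optional `:<type>` suffix; the value is read from the environment and cast on access. A short sketch follows, with made-up variable names.

```python
# Sketch only: WORKERS and DEBUG are hypothetical environment variables.
import os
from unittest import mock

from taipy.config.common._template_handler import _TemplateHandler

with mock.patch.dict(os.environ, {"WORKERS": "4", "DEBUG": "false"}):
    assert _TemplateHandler._replace_templates("ENV[WORKERS]:int") == 4
    assert _TemplateHandler._replace_templates("ENV[DEBUG]:bool") is False
    assert _TemplateHandler._replace_templates("plain value") == "plain value"
```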

+ 47 - 0
src/taipy/config/tests/config/common/test_validate_id.py

@@ -0,0 +1,47 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import pytest
+
+from src.taipy.config.common._validate_id import _validate_id
+from src.taipy.config.exceptions.exceptions import InvalidConfigurationId
+
+
+class TestId:
+    def test_validate_id(self):
+        s = _validate_id("foo")
+        assert s == "foo"
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id("1foo")
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id("foo bar")
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id("foo/foo$")
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id("")
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id(" ")
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id("class")
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id("def")
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id("with")
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id("CYCLE")
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id("SCENARIO")
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id("SEQUENCE")
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id("TASK")
+        with pytest.raises(InvalidConfigurationId):
+            _validate_id("DATANODE")

+ 49 - 0
src/taipy/config/tests/config/conftest.py

@@ -0,0 +1,49 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import pytest
+
+from src.taipy.config._config import _Config
+from src.taipy.config._config_comparator._config_comparator import _ConfigComparator
+from src.taipy.config._serializer._toml_serializer import _TomlSerializer
+from src.taipy.config.checker.issue_collector import IssueCollector
+from src.taipy.config.config import Config
+from src.taipy.config.section import Section
+from tests.config.utils.section_for_tests import SectionForTest
+from tests.config.utils.unique_section_for_tests import UniqueSectionForTest
+
+
+@pytest.fixture(scope="function", autouse=True)
+def reset():
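+    # Runs automatically before every test: restores the Config singleton and re-registers the test sections.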
+    reset_configuration_singleton()
+    register_test_sections()
+
+
+def reset_configuration_singleton():
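+    # Rebuild the class-level Config state: default, Python, file, env-file, and applied configs, plus collector, serializer, and comparator.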
+    Config.unblock_update()
+    Config._default_config = _Config()._default_config()
+    Config._python_config = _Config()
+    Config._file_config = _Config()
+    Config._env_file_config = _Config()
+    Config._applied_config = _Config()
+    Config._collector = IssueCollector()
+    Config._serializer = _TomlSerializer()
+    Config._comparator = _ConfigComparator()
+
+
+def register_test_sections():
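+    # Register the default test sections and expose the test-only configure helpers on the Config class.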
+    Config._register_default(UniqueSectionForTest("default_attribute"))
+    Config.configure_unique_section_for_tests = UniqueSectionForTest._configure
+    Config.unique_section_name = Config.unique_sections[UniqueSectionForTest.name]
+
+    Config._register_default(SectionForTest(Section._DEFAULT_KEY, "default_attribute", prop="default_prop", prop_int=0))
+    Config.configure_section_for_tests = SectionForTest._configure
+    Config.section_name = Config.sections[SectionForTest.name]

+ 10 - 0
src/taipy/config/tests/config/global_app/__init__.py

@@ -0,0 +1,10 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.

+ 46 - 0
src/taipy/config/tests/config/global_app/test_global_app_config.py

@@ -0,0 +1,46 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+from unittest import mock
+
+import pytest
+
+from src.taipy.config.config import Config
+from src.taipy.config.exceptions.exceptions import ConfigurationUpdateBlocked
+
+
+def test_global_config_with_env_variable_value():
+    with mock.patch.dict(os.environ, {"FOO": "bar", "BAZ": "qux"}):
+        Config.configure_global_app(foo="ENV[FOO]", bar="ENV[BAZ]")
+        assert Config.global_config.foo == "bar"
+        assert Config.global_config.bar == "qux"
+
+
+def test_default_global_app_config():
+    global_config = Config.global_config
+    assert global_config is not None
+    assert not global_config.notification
+    assert len(global_config.properties) == 0
+
+
+def test_block_update_global_app_config():
+    Config.block_update()
+
+    with pytest.raises(ConfigurationUpdateBlocked):
+        Config.configure_global_app(foo="bar")
+
+    with pytest.raises(ConfigurationUpdateBlocked):
+        Config.global_config.properties = {"foo": "bar"}
+
+    # Check that the global_config stays at its default values
+    assert Config.global_config.foo is None
+    assert len(Config.global_config.properties) == 0

+ 152 - 0
src/taipy/config/tests/config/test_compilation.py

@@ -0,0 +1,152 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import pytest
+
+from src.taipy.config.config import Config
+from src.taipy.config.section import Section
+from tests.config.utils.named_temporary_file import NamedTemporaryFile
+from tests.config.utils.section_for_tests import SectionForTest
+from tests.config.utils.section_of_sections_list_for_tests import SectionOfSectionsListForTest
+
+
+@pytest.fixture
+def _init_list_section_for_test():
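+    # Register a default section whose attribute is a list of other sections, and expose its configure helper.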
+    Config._register_default(SectionOfSectionsListForTest(Section._DEFAULT_KEY, [], prop="default_prop", prop_int=0))
+    Config.configure_list_section_for_tests = SectionOfSectionsListForTest._configure
+    Config.list_section_name = Config.sections[SectionOfSectionsListForTest.name]
+
+
+def test_applied_config_compilation_does_not_change_other_configs():
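+    # Before any explicit configuration, only the default and applied configs hold the unique test section.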
+    assert len(Config._default_config._unique_sections) == 1
+    assert Config._default_config._unique_sections["unique_section_name"] is not None
+    assert Config._default_config._unique_sections["unique_section_name"].attribute == "default_attribute"
+    assert Config._default_config._unique_sections["unique_section_name"].prop is None
+    assert len(Config._python_config._unique_sections) == 0
+    assert len(Config._file_config._unique_sections) == 0
+    assert len(Config._env_file_config._unique_sections) == 0
+    assert len(Config._applied_config._unique_sections) == 1
+    assert Config._applied_config._unique_sections["unique_section_name"] is not None
+    assert Config._applied_config._unique_sections["unique_section_name"].attribute == "default_attribute"
+    assert Config._applied_config._unique_sections["unique_section_name"].prop is None
+    assert len(Config.unique_sections) == 1
+    assert Config.unique_sections["unique_section_name"] is not None
+    assert Config.unique_sections["unique_section_name"].attribute == "default_attribute"
+    assert Config.unique_sections["unique_section_name"].prop is None
+    assert (
+        Config._applied_config._unique_sections["unique_section_name"]
+        is not Config._default_config._unique_sections["unique_section_name"]
+    )
+
+    Config.configure_unique_section_for_tests("qwe", prop="rty")
+
+    assert len(Config._default_config._unique_sections) == 1
+    assert Config._default_config._unique_sections["unique_section_name"] is not None
+    assert Config._default_config._unique_sections["unique_section_name"].attribute == "default_attribute"
+    assert Config._default_config._unique_sections["unique_section_name"].prop is None
+    assert len(Config._python_config._unique_sections) == 1
+    assert Config._python_config._unique_sections["unique_section_name"] is not None
+    assert Config._python_config._unique_sections["unique_section_name"].attribute == "qwe"
+    assert Config._python_config._unique_sections["unique_section_name"].prop == "rty"
+    assert (
+        Config._python_config._unique_sections["unique_section_name"]
+        != Config._default_config._unique_sections["unique_section_name"]
+    )
+    assert len(Config._file_config._unique_sections) == 0
+    assert len(Config._env_file_config._unique_sections) == 0
+    assert len(Config._applied_config._unique_sections) == 1
+    assert Config._applied_config._unique_sections["unique_section_name"] is not None
+    assert Config._applied_config._unique_sections["unique_section_name"].attribute == "qwe"
+    assert Config._applied_config._unique_sections["unique_section_name"].prop == "rty"
+    assert (
+        Config._python_config._unique_sections["unique_section_name"]
+        != Config._applied_config._unique_sections["unique_section_name"]
+    )
+    assert (
+        Config._default_config._unique_sections["unique_section_name"]
+        != Config._applied_config._unique_sections["unique_section_name"]
+    )
+    assert len(Config.unique_sections) == 1
+    assert Config.unique_sections["unique_section_name"] is not None
+    assert Config.unique_sections["unique_section_name"].attribute == "qwe"
+    assert Config.unique_sections["unique_section_name"].prop == "rty"
+
+
+def test_nested_section_instance_in_python(_init_list_section_for_test):
+    s1_cfg = Config.configure_section_for_tests("s1", attribute="foo")
+    s2_cfg = Config.configure_section_for_tests("s2", attribute="bar")
+    ss_cfg = Config.configure_list_section_for_tests("ss", attribute="foo", sections_list=[s1_cfg, s2_cfg])
+
+    s1_config_applied_instance = Config.section_name["s1"]
+    s1_config_python_instance = Config._python_config._sections[SectionForTest.name]["s1"]
+
+    s2_config_applied_instance = Config.section_name["s2"]
+    s2_config_python_instance = Config._python_config._sections[SectionForTest.name]["s2"]
+
+    assert ss_cfg.sections_list[0] is s1_config_applied_instance
+    assert ss_cfg.sections_list[0] is not s1_config_python_instance
+    assert ss_cfg.sections_list[1] is s2_config_applied_instance
+    assert ss_cfg.sections_list[1] is not s2_config_python_instance
+
+
+def _configure_in_toml():
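+    # Build a temporary TOML file with two plain sections and a list section referencing them by id.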
+    return NamedTemporaryFile(
+        content="""
+[TAIPY]
+
+[section_name.s1]
+attribute = "foo"
+
+[section_name.s2]
+attribute = "bar"
+
+[list_section_name.ss]
+sections_list = [ "foo", "s1:SECTION", "s2:SECTION"]
+    """
+    )
+
+
+def test_nested_section_instance_load_toml(_init_list_section_for_test):
+    toml_config = _configure_in_toml()
+    Config.load(toml_config)
+
+    s1_config_applied_instance = Config.section_name["s1"]
+    s1_config_python_instance = Config._python_config._sections[SectionForTest.name]["s1"]
+
+    s2_config_applied_instance = Config.section_name["s2"]
+    s2_config_python_instance = Config._python_config._sections[SectionForTest.name]["s2"]
+
+    ss_cfg = Config.list_section_name["ss"]
+
+    assert ss_cfg.sections_list[0] == "foo"
+    assert ss_cfg.sections_list[1] is s1_config_applied_instance
+    assert ss_cfg.sections_list[1] is not s1_config_python_instance
+    assert ss_cfg.sections_list[2] is s2_config_applied_instance
+    assert ss_cfg.sections_list[2] is not s2_config_python_instance
+
+
+def test_nested_section_instance_override_toml(_init_list_section_for_test):
+    toml_config = _configure_in_toml()
+    Config.override(toml_config)
+
+    s1_config_applied_instance = Config.section_name["s1"]
+    s1_config_python_instance = Config._file_config._sections[SectionForTest.name]["s1"]
+
+    s2_config_applied_instance = Config.section_name["s2"]
+    s2_config_python_instance = Config._file_config._sections[SectionForTest.name]["s2"]
+
+    ss_cfg = Config.list_section_name["ss"]
+
+    assert ss_cfg.sections_list[0] == "foo"
+    assert ss_cfg.sections_list[1] is s1_config_applied_instance
+    assert ss_cfg.sections_list[1] is not s1_config_python_instance
+    assert ss_cfg.sections_list[2] is s2_config_applied_instance
+    assert ss_cfg.sections_list[2] is not s2_config_python_instance

+ 356 - 0
src/taipy/config/tests/config/test_config_comparator.py

@@ -0,0 +1,356 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from unittest import mock
+
+from src.taipy.config import Config
+from src.taipy.config._config import _Config
+from src.taipy.config._config_comparator._comparator_result import _ComparatorResult
+from src.taipy.config.global_app.global_app_config import GlobalAppConfig
+
+from tests.config.utils.section_for_tests import SectionForTest
+from tests.config.utils.unique_section_for_tests import UniqueSectionForTest
+
+
+class TestConfigComparator:
+    unique_section_1 = UniqueSectionForTest(attribute="unique_attribute_1", prop="unique_prop_1")
+    unique_section_1b = UniqueSectionForTest(attribute="unique_attribute_1", prop="unique_prop_1b")
+    section_1 = SectionForTest("section_1", attribute="attribute_1", prop="prop_1")
+    section_2 = SectionForTest("section_2", attribute=2, prop="prop_2")
+    section_2b = SectionForTest("section_2", attribute="attribute_2", prop="prop_2b")
+    section_3 = SectionForTest("section_3", attribute=[1, 2, 3, 4], prop=["prop_1"])
+    section_3b = SectionForTest("section_3", attribute=[1, 2], prop=["prop_1", "prop_2", "prop_3"])
+    section_3c = SectionForTest("section_3", attribute=[2, 1], prop=["prop_3", "prop_1", "prop_2"])
+
+    def test_comparator_compare_method_call(self):
+        _config_1 = _Config._default_config()
+        _config_2 = _Config._default_config()
+
+        with mock.patch(
+            "src.taipy.config._config_comparator._config_comparator._ConfigComparator._find_conflict_config"
+        ) as mck:
+            Config._comparator._find_conflict_config(_config_1, _config_2)
+            mck.assert_called_once_with(_config_1, _config_2)
+
+    def test_comparator_without_diff(self):
+        _config_1 = _Config._default_config()
+        _config_2 = _Config._default_config()
+
+        config_diff = Config._comparator._find_conflict_config(_config_1, _config_2)
+        assert isinstance(config_diff, _ComparatorResult)
+        assert config_diff == {}
+
+    def test_comparator_with_updated_global_config(self):
+        _config_1 = _Config._default_config()
+        _config_1._global_config = GlobalAppConfig(foo="bar")
+
+        _config_2 = _Config._default_config()
+        _config_2._global_config = GlobalAppConfig(foo="baz", bar="foo")
+
+        config_diff = Config._comparator._find_conflict_config(_config_1, _config_2)
+
+        assert config_diff.get("unconflicted_sections") is None
+        assert config_diff.get("conflicted_sections") is not None
+
+        conflicted_config_diff = config_diff["conflicted_sections"]
+        assert len(conflicted_config_diff["modified_items"]) == 1
+        assert conflicted_config_diff["modified_items"][0] == (
+            ("Global Configuration", "foo", None),
+            ("bar", "baz"),
+        )
+        assert len(conflicted_config_diff["added_items"]) == 1
+        assert conflicted_config_diff["added_items"][0] == (
+            ("Global Configuration", "bar", None),
+            "foo",
+        )
+
+    def test_comparator_with_new_section(self):
+        _config_1 = _Config._default_config()
+
+        # The first "section_name" is added to the Config
+        _config_2 = _Config._default_config()
+        _config_2._sections[SectionForTest.name] = {"section_1": self.section_1}
+        config_diff = Config._comparator._find_conflict_config(_config_1, _config_2)
+
+        conflicted_config_diff = config_diff["conflicted_sections"]
+        assert len(conflicted_config_diff["added_items"]) == 1
+        assert conflicted_config_diff["added_items"][0] == (
+            ("section_name", None, None),
+            {"section_1": {"attribute": "attribute_1", "prop": "prop_1"}},
+        )
+        assert conflicted_config_diff.get("modified_items") is None
+        assert conflicted_config_diff.get("removed_items") is None
+
+        # A new "section_name" is added to the Config
+        _config_3 = _Config._default_config()
+        _config_3._sections[SectionForTest.name] = {"section_1": self.section_1, "section_2": self.section_2}
+        config_diff = Config._comparator._find_conflict_config(_config_2, _config_3)
+
+        conflicted_config_diff = config_diff["conflicted_sections"]
+        assert len(conflicted_config_diff["added_items"]) == 1
+        assert conflicted_config_diff["added_items"][0] == (
+            ("section_name", "section_2", None),
+            {"attribute": "2:int", "prop": "prop_2"},
+        )
+        assert conflicted_config_diff.get("modified_items") is None
+        assert conflicted_config_diff.get("removed_items") is None
+
+    def test_comparator_with_removed_section(self):
+        _config_1 = _Config._default_config()
+
+        # All "section_name" sections are removed from the Config
+        _config_2 = _Config._default_config()
+        _config_2._sections[SectionForTest.name] = {"section_1": self.section_1}
+        config_diff = Config._comparator._find_conflict_config(_config_2, _config_1)
+
+        conflicted_config_diff = config_diff["conflicted_sections"]
+        assert len(conflicted_config_diff["removed_items"]) == 1
+        assert conflicted_config_diff["removed_items"][0] == (
+            ("section_name", None, None),
+            {"section_1": {"attribute": "attribute_1", "prop": "prop_1"}},
+        )
+        assert conflicted_config_diff.get("modified_items") is None
+        assert conflicted_config_diff.get("added_items") is None
+
+        # Section "section_1" is removed from the Config
+        _config_3 = _Config._default_config()
+        _config_3._sections[SectionForTest.name] = {"section_1": self.section_1, "section_2": self.section_2}
+        config_diff = Config._comparator._find_conflict_config(_config_3, _config_2)
+
+        conflicted_config_diff = config_diff["conflicted_sections"]
+        assert len(conflicted_config_diff["removed_items"]) == 1
+        assert conflicted_config_diff["removed_items"][0] == (
+            ("section_name", "section_2", None),
+            {"attribute": "2:int", "prop": "prop_2"},
+        )
+        assert conflicted_config_diff.get("modified_items") is None
+        assert conflicted_config_diff.get("added_items") is None
+
+    def test_comparator_with_modified_section(self):
+        _config_1 = _Config._default_config()
+        _config_1._sections[SectionForTest.name] = {"section_2": self.section_2}
+
+        # All "section_name" sections are removed from the Config
+        _config_2 = _Config._default_config()
+        _config_2._sections[SectionForTest.name] = {"section_2": self.section_2b}
+        config_diff = Config._comparator._find_conflict_config(_config_1, _config_2)
+
+        conflicted_config_diff = config_diff["conflicted_sections"]
+        assert len(conflicted_config_diff["modified_items"]) == 2
+        assert conflicted_config_diff["modified_items"][0] == (
+            ("section_name", "section_2", "attribute"),
+            ("2:int", "attribute_2"),
+        )
+        assert conflicted_config_diff["modified_items"][1] == (
+            ("section_name", "section_2", "prop"),
+            ("prop_2", "prop_2b"),
+        )
+        assert conflicted_config_diff.get("removed_items") is None
+        assert conflicted_config_diff.get("added_items") is None
+
+    def test_comparator_with_modified_list_attribute(self):
+        _config_1 = _Config._default_config()
+        _config_1._sections[SectionForTest.name] = {"section_3": self.section_3}
+
+        # All "section_name" sections are removed from the Config
+        _config_2 = _Config._default_config()
+        _config_2._sections[SectionForTest.name] = {"section_3": self.section_3b}
+        config_diff = Config._comparator._find_conflict_config(_config_1, _config_2)
+
+        conflicted_config_diff = config_diff["conflicted_sections"]
+        assert len(conflicted_config_diff["modified_items"]) == 2
+        assert conflicted_config_diff["modified_items"][0] == (
+            ("section_name", "section_3", "prop"),
+            (["prop_1"], ["prop_1", "prop_2", "prop_3"]),
+        )
+        assert conflicted_config_diff["modified_items"][1] == (
+            ("section_name", "section_3", "attribute"),
+            (["1:int", "2:int", "3:int", "4:int"], ["1:int", "2:int"]),
+        )
+        assert conflicted_config_diff.get("removed_items") is None
+        assert conflicted_config_diff.get("added_items") is None
+
+    def test_comparator_with_different_order_list_attributes(self):
+        _config_1 = _Config._default_config()
+        _config_1._sections[SectionForTest.name] = {"section_3": self.section_3b}
+
+        # Create _config_2 with different order of list attributes
+        _config_2 = _Config._default_config()
+        _config_2._sections[SectionForTest.name] = {"section_3": self.section_3c}
+        config_diff = Config._comparator._find_conflict_config(_config_1, _config_2)
+
+        # There should be no difference since the order of list attributes is ignored
+        assert config_diff == {}
+
+    def test_comparator_with_new_unique_section(self):
+        _config_1 = _Config._default_config()
+
+        _config_2 = _Config._default_config()
+        _config_2._unique_sections[UniqueSectionForTest.name] = self.unique_section_1
+        config_diff = Config._comparator._find_conflict_config(_config_1, _config_2)
+
+        conflicted_config_diff = config_diff["conflicted_sections"]
+        assert len(conflicted_config_diff["added_items"]) == 1
+        assert conflicted_config_diff["added_items"][0] == (
+            ("unique_section_name", None, None),
+            {"attribute": "unique_attribute_1", "prop": "unique_prop_1"},
+        )
+        assert conflicted_config_diff.get("modified_items") is None
+        assert conflicted_config_diff.get("removed_items") is None
+
+    def test_comparator_with_removed_unique_section(self):
+        _config_1 = _Config._default_config()
+
+        _config_2 = _Config._default_config()
+        _config_2._unique_sections[UniqueSectionForTest.name] = self.unique_section_1
+        config_diff = Config._comparator._find_conflict_config(_config_2, _config_1)
+
+        conflicted_config_diff = config_diff["conflicted_sections"]
+        assert len(conflicted_config_diff["removed_items"]) == 1
+        assert conflicted_config_diff["removed_items"][0] == (
+            ("unique_section_name", None, None),
+            {"attribute": "unique_attribute_1", "prop": "unique_prop_1"},
+        )
+        assert conflicted_config_diff.get("modified_items") is None
+        assert conflicted_config_diff.get("added_items") is None
+
+    def test_comparator_with_modified_unique_section(self):
+        _config_1 = _Config._default_config()
+        _config_1._unique_sections[UniqueSectionForTest.name] = self.unique_section_1
+
+        # All "section_name" sections are removed from the Config
+        _config_2 = _Config._default_config()
+        _config_2._unique_sections[UniqueSectionForTest.name] = self.unique_section_1b
+        config_diff = Config._comparator._find_conflict_config(_config_1, _config_2)
+
+        conflicted_config_diff = config_diff["conflicted_sections"]
+        assert len(conflicted_config_diff["modified_items"]) == 1
+        assert conflicted_config_diff["modified_items"][0] == (
+            ("unique_section_name", "prop", None),
+            ("unique_prop_1", "unique_prop_1b"),
+        )
+        assert conflicted_config_diff.get("removed_items") is None
+        assert conflicted_config_diff.get("added_items") is None
+
+    def test_unconflicted_section_name_store_statically(self):
+        Config._comparator._add_unconflicted_section("section_name_1")
+        assert Config._comparator._unconflicted_sections == {"section_name_1"}
+
+        Config._comparator._add_unconflicted_section("section_name_2")
+        assert Config._comparator._unconflicted_sections == {"section_name_1", "section_name_2"}
+
+        Config._comparator._add_unconflicted_section("section_name_1")
+        assert Config._comparator._unconflicted_sections == {"section_name_1", "section_name_2"}
+
+    def test_unconflicted_diff_is_stored_separated_from_conflicted_ones(self):
+        _config_1 = _Config._default_config()
+        _config_1._unique_sections[UniqueSectionForTest.name] = self.unique_section_1
+        _config_1._sections[SectionForTest.name] = {"section_2": self.section_2}
+
+        _config_2 = _Config._default_config()
+        _config_2._unique_sections[UniqueSectionForTest.name] = self.unique_section_1b
+        _config_2._sections[SectionForTest.name] = {"section_2": self.section_2b}
+
+        # Compare the two configurations
+        config_diff = Config._comparator._find_conflict_config(_config_1, _config_2)
+
+        assert config_diff.get("unconflicted_sections") is None
+        assert config_diff.get("conflicted_sections") is not None
+        assert len(config_diff["conflicted_sections"]["modified_items"]) == 3
+
+        # Ignore any diff of "section_name" and compare
+        Config._comparator._add_unconflicted_section("section_name")
+        config_diff = Config._comparator._find_conflict_config(_config_1, _config_2)
+        assert config_diff.get("unconflicted_sections") is not None
+        assert len(config_diff["unconflicted_sections"]["modified_items"]) == 2
+        assert config_diff.get("conflicted_sections") is not None
+        assert len(config_diff["conflicted_sections"]["modified_items"]) == 1
+
+        # Also ignore any diff of "unique_section_name" and compare
+        Config._comparator._add_unconflicted_section(["unique_section_name"])
+        config_diff = Config._comparator._find_conflict_config(_config_1, _config_2)
+        assert config_diff.get("unconflicted_sections") is not None
+        assert len(config_diff["unconflicted_sections"]["modified_items"]) == 3
+        assert config_diff.get("conflicted_sections") is None
+
+    def test_comparator_log_message(self, caplog):
+        _config_1 = _Config._default_config()
+        _config_1._unique_sections[UniqueSectionForTest.name] = self.unique_section_1
+        _config_1._sections[SectionForTest.name] = {"section_2": self.section_2}
+
+        _config_2 = _Config._default_config()
+        _config_2._unique_sections[UniqueSectionForTest.name] = self.unique_section_1b
+        _config_2._sections[SectionForTest.name] = {"section_2": self.section_2b}
+
+        # Ignore any diff of "section_name" and compare
+        Config._comparator._add_unconflicted_section("section_name")
+        Config._comparator._find_conflict_config(_config_1, _config_2)
+
+        error_messages = caplog.text.strip().split("\n")
+        assert len(error_messages) == 5
+        assert all(
+            t in error_messages[0]
+            for t in [
+                "INFO",
+                "There are non-conflicting changes between the current configuration and the current configuration:",
+            ]
+        )
+        assert 'section_name "section_2" has attribute "attribute" modified: 2:int -> attribute_2' in error_messages[1]
+        assert 'section_name "section_2" has attribute "prop" modified: prop_2 -> prop_2b' in error_messages[2]
+        assert all(
+            t in error_messages[3]
+            for t in [
+                "ERROR",
+                "The current configuration conflicts with the current configuration:",
+            ]
+        )
+        assert 'unique_section_name "prop" was modified: unique_prop_1 -> unique_prop_1b' in error_messages[4]
+
+        caplog.clear()
+
+        Config._comparator._find_conflict_config(_config_1, _config_2, old_version_number="1.0")
+
+        error_messages = caplog.text.strip().split("\n")
+        assert len(error_messages) == 5
+        assert all(
+            t in error_messages[0]
+            for t in [
+                "INFO",
+                "There are non-conflicting changes between the configuration for version 1.0 and the current "
+                "configuration:",
+            ]
+        )
+        assert all(
+            t in error_messages[3]
+            for t in [
+                "ERROR",
+                "The configuration for version 1.0 conflicts with the current configuration:",
+            ]
+        )
+
+        caplog.clear()
+
+        Config._comparator._compare(
+            _config_1,
+            _config_2,
+            version_number_1="1.0",
+            version_number_2="2.0",
+        )
+
+        error_messages = caplog.text.strip().split("\n")
+        assert len(error_messages) == 3
+        assert all(
+            t in error_messages[0]
+            for t in ["INFO", "Differences between version 1.0 Configuration and version 2.0 Configuration:"]
+        )
+
+        caplog.clear()

+ 48 - 0
src/taipy/config/tests/config/test_default_config.py

@@ -0,0 +1,48 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from src.taipy.config.config import Config
+from src.taipy.config.global_app.global_app_config import GlobalAppConfig
+from src.taipy.config.section import Section
+from tests.config.utils.section_for_tests import SectionForTest
+from tests.config.utils.unique_section_for_tests import UniqueSectionForTest
+
+
+def _test_default_global_app_config(global_config: GlobalAppConfig):
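+    # Shared assertions for a pristine GlobalAppConfig.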
+    assert global_config is not None
+    assert not global_config.notification
+    assert len(global_config.properties) == 0
+
+
+def test_default_configuration():
+    default_config = Config._default_config
+    assert default_config._unique_sections is not None
+    assert len(default_config._unique_sections) == 1
+    assert default_config._unique_sections[UniqueSectionForTest.name] is not None
+    assert default_config._unique_sections[UniqueSectionForTest.name].attribute == "default_attribute"
+    assert default_config._sections is not None
+    assert len(default_config._sections) == 1
+
+    _test_default_global_app_config(default_config._global_config)
+    _test_default_global_app_config(Config.global_config)
+    _test_default_global_app_config(GlobalAppConfig().default_config())
+
+
+def test_register_default_configuration():
+    Config._register_default(SectionForTest(Section._DEFAULT_KEY, "default_attribute", prop1="prop1"))
+
+    # Replace the first default section
+    Config._register_default(SectionForTest(Section._DEFAULT_KEY, "default_attribute", prop2="prop2"))
+
+    default_section = Config.sections[SectionForTest.name][Section._DEFAULT_KEY]
+    assert len(default_section.properties) == 1
+    assert default_section.prop2 == "prop2"
+    assert default_section.prop1 is None

+ 58 - 0
src/taipy/config/tests/config/test_env_file_config.py

@@ -0,0 +1,58 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+
+import pytest
+
+from src.taipy.config.config import Config
+from src.taipy.config.exceptions.exceptions import ConfigurationUpdateBlocked
+from tests.config.utils.named_temporary_file import NamedTemporaryFile
+
+config_from_filename = NamedTemporaryFile(
+    """
+[TAIPY]
+custom_property_not_overwritten = true
+custom_property_overwritten = 10
+"""
+)
+
+config_from_environment = NamedTemporaryFile(
+    """
+[TAIPY]
+custom_property_overwritten = 11
+"""
+)
+
+
+def test_load_from_environment_overwrite_load_from_filename():
+    os.environ[Config._ENVIRONMENT_VARIABLE_NAME_WITH_CONFIG_PATH] = config_from_environment.filename
+    Config.load(config_from_filename.filename)
+
+    assert Config.global_config.custom_property_not_overwritten is True
+    assert Config.global_config.custom_property_overwritten == 11
+    os.environ.pop(Config._ENVIRONMENT_VARIABLE_NAME_WITH_CONFIG_PATH)
+
+
+def test_block_load_from_environment_overwrite_load_from_filename():
+    Config.load(config_from_filename.filename)
+    assert Config.global_config.custom_property_not_overwritten is True
+    assert Config.global_config.custom_property_overwritten == 10
+
+    Config.block_update()
+
+    with pytest.raises(ConfigurationUpdateBlocked):
+        os.environ[Config._ENVIRONMENT_VARIABLE_NAME_WITH_CONFIG_PATH] = config_from_environment.filename
+        Config.load(config_from_filename.filename)
+
+    os.environ.pop(Config._ENVIRONMENT_VARIABLE_NAME_WITH_CONFIG_PATH)
+    assert Config.global_config.custom_property_not_overwritten is True
+    assert Config.global_config.custom_property_overwritten == 10  # The blocked Config.load failed to override the value

+ 42 - 0
src/taipy/config/tests/config/test_file_config.py

@@ -0,0 +1,42 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import pytest
+
+from src.taipy.config.config import Config
+from src.taipy.config.exceptions.exceptions import LoadingError
+from tests.config.utils.named_temporary_file import NamedTemporaryFile
+
+
+def test_node_can_not_appear_twice():
+    config = NamedTemporaryFile(
+        """
+[unique_section_name]
+attribute = "my_attribute"
+
+[unique_section_name]
+attribute = "other_attribute"
+    """
+    )
+
+    with pytest.raises(LoadingError, match="Can not load configuration"):
+        Config.load(config.filename)
+
+
+def test_skip_configuration_outside_nodes():
+    config = NamedTemporaryFile(
+        """
+foo = "bar"
+    """
+    )
+
+    Config.load(config.filename)
+    assert Config.global_config.foo is None

+ 200 - 0
src/taipy/config/tests/config/test_override_config.py

@@ -0,0 +1,200 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+from unittest import mock
+
+import pytest
+
+from src.taipy.config.config import Config
+from src.taipy.config.exceptions.exceptions import InconsistentEnvVariableError, MissingEnvVariableError
+from tests.config.utils.named_temporary_file import NamedTemporaryFile
+
+
+def test_override_default_configuration_with_code_configuration():
+    assert Config.global_config.root_folder != "foo"
+
+    assert len(Config.unique_sections) == 1
+    assert Config.unique_sections["unique_section_name"] is not None
+    assert Config.unique_sections["unique_section_name"].attribute == "default_attribute"
+    assert Config.unique_sections["unique_section_name"].prop is None
+
+    assert len(Config.sections) == 1
+    assert len(Config.sections["section_name"]) == 1
+    assert Config.sections["section_name"] is not None
+    assert Config.sections["section_name"]["default"].attribute == "default_attribute"
+
+    Config.configure_global_app(root_folder="foo")
+    assert Config.global_config.root_folder == "foo"
+
+    Config.configure_unique_section_for_tests("foo", prop="bar")
+    assert len(Config.unique_sections) == 1
+    assert Config.unique_sections["unique_section_name"] is not None
+    assert Config.unique_sections["unique_section_name"].attribute == "foo"
+    assert Config.unique_sections["unique_section_name"].prop == "bar"
+
+    Config.configure_section_for_tests("my_id", "baz", prop="qux")
+    assert len(Config.unique_sections) == 1
+    assert Config.sections["section_name"] is not None
+    assert Config.sections["section_name"]["my_id"].attribute == "baz"
+    assert Config.sections["section_name"]["my_id"].prop == "qux"
+
+
+def test_override_default_config_with_code_config_including_env_variable_values():
+    Config.configure_global_app()
+    assert Config.global_config.foo is None
+    Config.configure_global_app(foo="bar")
+    assert Config.global_config.foo == "bar"
+
+    with mock.patch.dict(os.environ, {"FOO": "foo"}):
+        Config.configure_global_app(foo="ENV[FOO]")
+        assert Config.global_config.foo == "foo"
+
+
+def test_override_default_configuration_with_file_configuration():
+    tf = NamedTemporaryFile(
+        """
+[TAIPY]
+foo = "bar"
+
+"""
+    )
+    assert Config.global_config.foo is None
+
+    Config.load(tf.filename)
+
+    assert Config.global_config.foo == "bar"
+
+
+def test_override_default_config_with_file_config_including_env_variable_values():
+    tf = NamedTemporaryFile(
+        """
+[TAIPY]
+foo_attribute = "ENV[FOO]:int"
+bar_attribute = "ENV[BAR]:bool"
+"""
+    )
+    assert Config.global_config.foo_attribute is None
+    assert Config.global_config.bar_attribute is None
+
+    with mock.patch.dict(os.environ, {"FOO": "foo", "BAR": "true"}):
+        with pytest.raises(InconsistentEnvVariableError):
+            Config.load(tf.filename)
+            Config.global_config.foo_attribute
+
+    with mock.patch.dict(os.environ, {"FOO": "5"}):
+        with pytest.raises(MissingEnvVariableError):
+            Config.load(tf.filename)
+            Config.global_config.bar_attribute
+
+    with mock.patch.dict(os.environ, {"FOO": "6", "BAR": "TRUe"}):
+        Config.load(tf.filename)
+        assert Config.global_config.foo_attribute == 6
+        assert Config.global_config.bar_attribute
+
+
+def test_code_configuration_does_not_override_file_configuration():
+    config_from_filename = NamedTemporaryFile(
+        """
+[TAIPY]
+foo = 2
+    """
+    )
+    Config.override(config_from_filename.filename)
+
+    Config.configure_global_app(foo=21)
+
+    assert Config.global_config.foo == 2  # From file config
+
+
+def test_code_configuration_does_not_override_file_configuration_including_env_variable_values():
+    config_from_filename = NamedTemporaryFile(
+        """
+[TAIPY]
+foo = 2
+    """
+    )
+    Config.override(config_from_filename.filename)
+
+    with mock.patch.dict(os.environ, {"FOO": "21"}):
+        Config.configure_global_app(foo="ENV[FOO]")
+        assert Config.global_config.foo == 2  # From file config
+
+
+def test_file_configuration_overrides_code_configuration():
+    config_from_filename = NamedTemporaryFile(
+        """
+[TAIPY]
+foo = 2
+    """
+    )
+    Config.configure_global_app(foo=21)
+    Config.load(config_from_filename.filename)
+
+    assert Config.global_config.foo == 2  # From file config
+
+
+def test_file_configuration_overrides_code_configuration_including_env_variable_values():
+    config_from_filename = NamedTemporaryFile(
+        """
+[TAIPY]
+foo = "ENV[FOO]:int"
+    """
+    )
+    Config.configure_global_app(foo=21)
+
+    with mock.patch.dict(os.environ, {"FOO": "2"}):
+        Config.load(config_from_filename.filename)
+        assert Config.global_config.foo == 2  # From file config
+
+
+def test_override_default_configuration_with_multiple_configurations():
+    file_config = NamedTemporaryFile(
+        """
+[TAIPY]
+foo = 10
+bar = "baz"
+    """
+    )
+    # Default config is applied
+    assert Config.global_config.foo is None
+    assert Config.global_config.bar is None
+
+    # Code config is applied
+    Config.configure_global_app(foo="bar")
+    assert Config.global_config.foo == "bar"
+    assert Config.global_config.bar is None
+
+    # File config is applied
+    Config.load(file_config.filename)
+    assert Config.global_config.foo == 10
+    assert Config.global_config.bar == "baz"
+
+
+def test_override_default_configuration_with_multiple_configurations_including_environment_variable_values():
+    file_config = NamedTemporaryFile(
+        """
+[TAIPY]
+att = "ENV[BAZ]"
+    """
+    )
+
+    with mock.patch.dict(os.environ, {"FOO": "bar", "BAZ": "qux"}):
+        # Default config is applied
+        assert Config.global_config.att is None
+
+        # Code config is applied
+        Config.configure_global_app(att="ENV[FOO]")
+        assert Config.global_config.att == "bar"
+
+        # File config is applied
+        Config.load(file_config.filename)
+        assert Config.global_config.att == "qux"

+ 47 - 0
src/taipy/config/tests/config/test_section.py

@@ -0,0 +1,47 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+from unittest import mock
+
+import pytest
+
+from src.taipy.config.exceptions.exceptions import InvalidConfigurationId
+from tests.config.utils.section_for_tests import SectionForTest
+from tests.config.utils.unique_section_for_tests import UniqueSectionForTest
+
+
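+# A unique section whose name starts with a digit, which is not a valid configuration id.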
+class WrongUniqueSection(UniqueSectionForTest):
+    name = "1wrong_id"
+
+
+class WrongSection(SectionForTest):
+    name = "correct_name"
+
+
+def test_section_uses_valid_id():
+    with pytest.raises(InvalidConfigurationId):
+        WrongUniqueSection(attribute="foo")
+    with pytest.raises(InvalidConfigurationId):
+        WrongSection("wrong id", attribute="foo")
+    with pytest.raises(InvalidConfigurationId):
+        WrongSection("1wrong_id", attribute="foo")
+    with pytest.raises(InvalidConfigurationId):
+        WrongSection("wrong_@id", attribute="foo")
+
+
+def test_templated_properties_are_replaced():
+    with mock.patch.dict(os.environ, {"foo": "bar", "baz": "1"}):
+        u_sect = UniqueSectionForTest(attribute="attribute", tpl_property="ENV[foo]")
+        assert u_sect.tpl_property == "bar"
+
+        sect = SectionForTest(id="my_id", attribute="attribute", tpl_property="ENV[baz]:int")
+        assert sect.tpl_property == 1

+ 169 - 0
src/taipy/config/tests/config/test_section_registration.py

@@ -0,0 +1,169 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import pytest
+
+from src.taipy.config import Config
+from src.taipy.config.exceptions.exceptions import ConfigurationUpdateBlocked
+from tests.config.utils.section_for_tests import SectionForTest
+from tests.config.utils.unique_section_for_tests import UniqueSectionForTest
+
+
+def test_unique_section_registration_and_usage():
+    assert Config.unique_sections is not None
+    assert Config.unique_sections[UniqueSectionForTest.name] is not None
+    assert Config.unique_sections[UniqueSectionForTest.name].attribute == "default_attribute"
+    assert Config.unique_sections[UniqueSectionForTest.name].prop is None
+
+    mySection = Config.configure_unique_section_for_tests(attribute="my_attribute", prop="my_prop")
+
+    assert Config.unique_sections is not None
+    assert Config.unique_sections[UniqueSectionForTest.name] is not None
+    assert mySection is not None
+    assert Config.unique_sections[UniqueSectionForTest.name].attribute == "my_attribute"
+    assert mySection.attribute == "my_attribute"
+    assert Config.unique_sections[UniqueSectionForTest.name].prop == "my_prop"
+    assert mySection.prop == "my_prop"
+
+    myNewSection = Config.configure_unique_section_for_tests(attribute="my_new_attribute", prop="my_new_prop")
+
+    assert Config.unique_sections is not None
+    assert Config.unique_sections[UniqueSectionForTest.name] is not None
+    assert myNewSection is not None
+    assert mySection is not None
+    assert Config.unique_sections[UniqueSectionForTest.name].attribute == "my_new_attribute"
+    assert myNewSection.attribute == "my_new_attribute"
+    assert mySection.attribute == "my_new_attribute"
+    assert Config.unique_sections[UniqueSectionForTest.name].prop == "my_new_prop"
+    assert myNewSection.prop == "my_new_prop"
+    assert mySection.prop == "my_new_prop"
+
+
+def test_sections_exposed_as_attribute():
+    assert Config.unique_section_name.attribute == "default_attribute"
+    Config.configure_unique_section_for_tests("my_attribute")
+    assert Config.unique_section_name.attribute == "my_attribute"
+
+    assert Config.section_name["default"].attribute == "default_attribute"
+    Config.configure_section_for_tests(id="my_id", attribute="my_attribute")
+    assert Config.section_name["my_id"].attribute == "my_attribute"
+
+
+def test_section_registration_and_usage():
+    assert Config.sections is not None
+    assert len(Config.sections) == 1
+    assert Config.sections[SectionForTest.name] is not None
+    assert len(Config.sections[SectionForTest.name]) == 1
+    assert Config.sections[SectionForTest.name]["default"] is not None
+    assert Config.sections[SectionForTest.name]["default"].attribute == "default_attribute"
+    assert Config.sections[SectionForTest.name]["default"].prop == "default_prop"
+    assert Config.sections[SectionForTest.name]["default"].foo is None
+
+    myFirstSection = Config.configure_section_for_tests(id="first", attribute="my_attribute", prop="my_prop", foo="bar")
+    assert Config.sections is not None
+    assert len(Config.sections) == 1
+    assert Config.sections[SectionForTest.name] is not None
+    assert len(Config.sections[SectionForTest.name]) == 2
+    assert Config.sections[SectionForTest.name]["default"] is not None
+    assert Config.sections[SectionForTest.name]["default"].attribute == "default_attribute"
+    assert Config.sections[SectionForTest.name]["default"].prop == "default_prop"
+    assert Config.sections[SectionForTest.name]["default"].foo is None
+    assert Config.sections[SectionForTest.name]["first"] is not None
+    assert Config.sections[SectionForTest.name]["first"].attribute == "my_attribute"
+    assert Config.sections[SectionForTest.name]["first"].prop == "my_prop"
+    assert Config.sections[SectionForTest.name]["first"].foo == "bar"
+    assert myFirstSection.attribute == "my_attribute"
+    assert myFirstSection.prop == "my_prop"
+    assert myFirstSection.foo == "bar"
+
+    myNewSection = Config.configure_section_for_tests(id="second", attribute="my_new_attribute", prop="my_new_prop")
+    assert Config.sections is not None
+    assert len(Config.sections) == 1
+    assert Config.sections[SectionForTest.name] is not None
+    assert len(Config.sections[SectionForTest.name]) == 3
+    assert Config.sections[SectionForTest.name]["default"] is not None
+    assert Config.sections[SectionForTest.name]["default"].attribute == "default_attribute"
+    assert Config.sections[SectionForTest.name]["default"].prop == "default_prop"
+    assert Config.sections[SectionForTest.name]["default"].foo is None
+    assert Config.sections[SectionForTest.name]["first"] is not None
+    assert Config.sections[SectionForTest.name]["first"].attribute == "my_attribute"
+    assert Config.sections[SectionForTest.name]["first"].prop == "my_prop"
+    assert Config.sections[SectionForTest.name]["first"].foo == "bar"
+    assert Config.sections[SectionForTest.name]["second"] is not None
+    assert Config.sections[SectionForTest.name]["second"].attribute == "my_new_attribute"
+    assert Config.sections[SectionForTest.name]["second"].prop == "my_new_prop"
+    assert Config.sections[SectionForTest.name]["second"].foo is None
+    assert myFirstSection.attribute == "my_attribute"
+    assert myFirstSection.prop == "my_prop"
+    assert myFirstSection.foo == "bar"
+    assert myNewSection.attribute == "my_new_attribute"
+    assert myNewSection.prop == "my_new_prop"
+    assert myNewSection.foo is None
+
+    my2ndSection = Config.configure_section_for_tests(id="second", attribute="my_2nd_attribute", prop="my_2nd_prop")
+    assert Config.sections is not None
+    assert len(Config.sections) == 1
+    assert Config.sections[SectionForTest.name] is not None
+    assert len(Config.sections[SectionForTest.name]) == 3
+    assert Config.sections[SectionForTest.name]["default"] is not None
+    assert Config.sections[SectionForTest.name]["default"].attribute == "default_attribute"
+    assert Config.sections[SectionForTest.name]["default"].prop == "default_prop"
+    assert Config.sections[SectionForTest.name]["default"].foo is None
+    assert Config.sections[SectionForTest.name]["first"] is not None
+    assert Config.sections[SectionForTest.name]["first"].attribute == "my_attribute"
+    assert Config.sections[SectionForTest.name]["first"].prop == "my_prop"
+    assert Config.sections[SectionForTest.name]["first"].foo == "bar"
+    assert Config.sections[SectionForTest.name]["second"] is not None
+    assert Config.sections[SectionForTest.name]["second"].attribute == "my_2nd_attribute"
+    assert Config.sections[SectionForTest.name]["second"].prop == "my_2nd_prop"
+    assert Config.sections[SectionForTest.name]["second"].foo is None
+    assert myFirstSection.attribute == "my_attribute"
+    assert myFirstSection.prop == "my_prop"
+    assert myFirstSection.foo == "bar"
+    assert myNewSection.attribute == "my_2nd_attribute"
+    assert myNewSection.prop == "my_2nd_prop"
+    assert myNewSection.foo is None
+    assert my2ndSection.attribute == "my_2nd_attribute"
+    assert my2ndSection.prop == "my_2nd_prop"
+    assert my2ndSection.foo is None
+
+
+def test_block_registration():
+    myUniqueSection = Config.configure_unique_section_for_tests(attribute="my_unique_attribute", prop="my_unique_prop")
+    mySection = Config.configure_section_for_tests(id="section_id", attribute="my_attribute", prop="my_prop", foo="bar")
+
+    Config.block_update()
+
+    with pytest.raises(ConfigurationUpdateBlocked):
+        Config.configure_unique_section_for_tests(attribute="my_new_unique_attribute", prop="my_new_unique_prop")
+
+    with pytest.raises(ConfigurationUpdateBlocked):
+        Config.configure_section_for_tests(id="new", attribute="my_attribute", prop="my_prop", foo="bar")
+
+    with pytest.raises(ConfigurationUpdateBlocked):
+        myUniqueSection.attribute = "foo"
+
+    with pytest.raises(ConfigurationUpdateBlocked):
+        myUniqueSection.properties = {"foo": "bar"}
+
+    # myUniqueSection stays the same
+    assert myUniqueSection.attribute == "my_unique_attribute"
+    assert myUniqueSection.properties == {"prop": "my_unique_prop"}
+
+    with pytest.raises(ConfigurationUpdateBlocked):
+        mySection.attribute = "foo"
+
+    with pytest.raises(ConfigurationUpdateBlocked):
+        mySection.properties = {"foo": "foo"}
+
+    # mySection stays the same
+    assert mySection.attribute == "my_attribute"
+    assert mySection.properties == {"prop": "my_prop", "foo": "bar", "prop_int": 0}

+ 476 - 0
src/taipy/config/tests/config/test_section_serialization.py

@@ -0,0 +1,476 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import datetime
+import json
+import os
+from unittest import mock
+
+from src.taipy.config import Config
+from src.taipy.config._serializer._json_serializer import _JsonSerializer
+from src.taipy.config.common.frequency import Frequency
+from src.taipy.config.common.scope import Scope
+
+from tests.config.utils.named_temporary_file import NamedTemporaryFile
+from tests.config.utils.section_for_tests import SectionForTest
+from tests.config.utils.unique_section_for_tests import UniqueSectionForTest
+
+
+def add(a, b):
+    return a + b
+
+
+class CustomClass:
+    a = None
+    b = None
+
+
+class CustomEncoder(json.JSONEncoder):
+    def default(self, o):
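+        # Serialize datetime objects with a type marker so CustomDecoder can rebuild them.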
+        if isinstance(o, datetime.datetime):
+            result = {"__type__": "Datetime", "__value__": o.isoformat()}
+        else:
+            result = json.JSONEncoder.default(self, o)
+        return result
+
+
+class CustomDecoder(json.JSONDecoder):
+    def __init__(self, *args, **kwargs):
+        json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)
+
+    def object_hook(self, source):
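+        # Rebuild datetime objects from the type marker written by CustomEncoder.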
+        if source.get("__type__") == "Datetime":
+            return datetime.datetime.fromisoformat(source.get("__value__"))
+        else:
+            return source
+
+
+def test_write_toml_configuration_file():
+    expected_toml_config = """
+[TAIPY]
+
+[unique_section_name]
+attribute = "my_attribute"
+prop = "my_prop"
+prop_int = "1:int"
+prop_bool = "False:bool"
+prop_list = [ "p1", "1991-01-01T00:00:00:datetime", "1d0h0m0s:timedelta",]
+prop_scope = "SCENARIO:SCOPE"
+prop_freq = "QUARTERLY:FREQUENCY"
+baz = "ENV[QUX]"
+quux = "ENV[QUUZ]:bool"
+corge = [ "grault", "ENV[GARPLY]", "ENV[WALDO]:int", "3.0:float",]
+
+[section_name.default]
+attribute = "default_attribute"
+prop = "default_prop"
+prop_int = "0:int"
+
+[section_name.my_id]
+attribute = "my_attribute"
+prop = "default_prop"
+prop_int = "1:int"
+prop_bool = "False:bool"
+prop_list = [ "unique_section_name:SECTION",]
+prop_scope = "SCENARIO"
+baz = "ENV[QUX]"
+    """.strip()
+    tf = NamedTemporaryFile()
+    with mock.patch.dict(
+        os.environ, {"FOO": "in_memory", "QUX": "qux", "QUUZ": "true", "GARPLY": "garply", "WALDO": "17"}
+    ):
+        unique_section = Config.configure_unique_section_for_tests(
+            attribute="my_attribute",
+            prop="my_prop",
+            prop_int=1,
+            prop_bool=False,
+            prop_list=["p1", datetime.datetime(1991, 1, 1), datetime.timedelta(days=1)],
+            prop_scope=Scope.SCENARIO,
+            prop_freq=Frequency.QUARTERLY,
+            baz="ENV[QUX]",
+            quux="ENV[QUUZ]:bool",
+            corge=("grault", "ENV[GARPLY]", "ENV[WALDO]:int", 3.0),
+        )
+        Config.configure_section_for_tests(
+            "my_id",
+            "my_attribute",
+            prop_int=1,
+            prop_bool=False,
+            prop_list=[unique_section],
+            prop_scope="SCENARIO",
+            baz="ENV[QUX]",
+        )
+
+        Config.backup(tf.filename)
+        actual_config = tf.read().strip()
+        assert actual_config == expected_toml_config
+
+
+def test_read_toml_configuration_file():
+    toml_config = """
+[TAIPY]
+foo = "bar"
+
+[unique_section_name]
+attribute = "my_attribute"
+prop = "my_prop"
+prop_int = "1:int"
+prop_bool = "False:bool"
+prop_list = [ "p1", "1991-01-01T00:00:00:datetime", "1d0h0m0s:timedelta",]
+prop_scope = "SCENARIO:SCOPE"
+prop_freq = "QUARTERLY:FREQUENCY"
+baz = "ENV[QUX]"
+quux = "ENV[QUUZ]:bool"
+corge = [ "grault", "ENV[GARPLY]", "ENV[WALDO]:int", "3.0:float",]
+
+[TAIPY.custom_properties]
+bar = "baz"
+
+[section_name.default]
+attribute = "default_attribute"
+prop = "default_prop"
+prop_int = "0:int"
+
+[section_name.my_id]
+attribute = "my_attribute"
+prop = "default_prop"
+prop_int = "1:int"
+prop_bool = "False:bool"
+prop_list = [ "unique_section_name", "section_name.my_id",]
+prop_scope = "SCENARIO:SCOPE"
+baz = "ENV[QUX]"
+    """.strip()
+    tf = NamedTemporaryFile(toml_config)
+    with mock.patch.dict(
+        os.environ, {"FOO": "in_memory", "QUX": "qux", "QUUZ": "true", "GARPLY": "garply", "WALDO": "17"}
+    ):
+        Config.override(tf.filename)
+
+        assert Config.global_config.foo == "bar"
+        assert Config.global_config.custom_properties.get("bar") == "baz"
+
+        assert Config.unique_sections is not None
+        assert Config.unique_sections[UniqueSectionForTest.name] is not None
+        assert Config.unique_sections[UniqueSectionForTest.name].attribute == "my_attribute"
+        assert Config.unique_sections[UniqueSectionForTest.name].prop == "my_prop"
+        assert Config.unique_sections[UniqueSectionForTest.name].prop_int == 1
+        assert Config.unique_sections[UniqueSectionForTest.name].prop_bool is False
+        assert Config.unique_sections[UniqueSectionForTest.name].prop_list == [
+            "p1",
+            datetime.datetime(1991, 1, 1),
+            datetime.timedelta(days=1),
+        ]
+        assert Config.unique_sections[UniqueSectionForTest.name].prop_scope == Scope.SCENARIO
+        assert Config.unique_sections[UniqueSectionForTest.name].prop_freq == Frequency.QUARTERLY
+        assert Config.unique_sections[UniqueSectionForTest.name].baz == "qux"
+        assert Config.unique_sections[UniqueSectionForTest.name].quux is True
+        assert Config.unique_sections[UniqueSectionForTest.name].corge == [
+            "grault",
+            "garply",
+            17,
+            3.0,
+        ]
+
+        assert Config.sections is not None
+        assert len(Config.sections) == 1
+        assert Config.sections[SectionForTest.name] is not None
+        assert len(Config.sections[SectionForTest.name]) == 2
+        assert Config.sections[SectionForTest.name]["default"] is not None
+        assert Config.sections[SectionForTest.name]["default"].attribute == "default_attribute"
+        assert Config.sections[SectionForTest.name]["default"].prop == "default_prop"
+        assert Config.sections[SectionForTest.name]["default"].prop_int == 0
+        assert Config.sections[SectionForTest.name]["my_id"] is not None
+        assert Config.sections[SectionForTest.name]["my_id"].attribute == "my_attribute"
+        assert Config.sections[SectionForTest.name]["my_id"].prop == "default_prop"
+        assert Config.sections[SectionForTest.name]["my_id"].prop_int == 1
+        assert Config.sections[SectionForTest.name]["my_id"].prop_bool is False
+        assert Config.sections[SectionForTest.name]["my_id"].prop_list == ["unique_section_name", "section_name.my_id"]
+        assert Config.sections[SectionForTest.name]["my_id"].prop_scope == Scope.SCENARIO
+        assert Config.sections[SectionForTest.name]["my_id"].baz == "qux"
+
+        tf2 = NamedTemporaryFile()
+        Config.backup(tf2.filename)
+        actual_config_2 = tf2.read().strip()
+        assert actual_config_2 == toml_config
+
+
+def test_read_write_toml_configuration_file_with_function_and_class():
+    expected_toml_config = """
+[TAIPY]
+
+[unique_section_name]
+attribute = "my_attribute"
+prop = "my_prop"
+prop_list = [ "tests.config.test_section_serialization.CustomEncoder:class", "tests.config.test_section_serialization.CustomDecoder:class",]
+
+[section_name.default]
+attribute = "default_attribute"
+prop = "default_prop"
+prop_int = "0:int"
+
+[section_name.my_id]
+attribute = "my_attribute"
+prop = "default_prop"
+prop_int = "0:int"
+prop_fct_list = [ "tests.config.test_section_serialization.add:function",]
+prop_class_list = [ "tests.config.test_section_serialization.CustomClass:class",]
+
+[section_name.my_id_2]
+attribute = "my_attribute_2"
+prop = "default_prop"
+prop_int = "0:int"
+prop_fct_list = [ "builtins.print:function", "builtins.pow:function",]
+    """.strip()
+
+    tf = NamedTemporaryFile()
+    Config.configure_unique_section_for_tests(
+        attribute="my_attribute",
+        prop="my_prop",
+        prop_list=[CustomEncoder, CustomDecoder],
+    )
+    Config.configure_section_for_tests(
+        "my_id",
+        "my_attribute",
+        prop_fct_list=[add],
+        prop_class_list=[CustomClass],
+    )
+
+    Config.configure_section_for_tests(
+        "my_id_2",
+        "my_attribute_2",
+        prop_fct_list=[print, pow],
+    )
+
+    Config.backup(tf.filename)
+    actual_exported_toml = tf.read().strip()
+    assert actual_exported_toml == expected_toml_config
+
+    Config.override(tf.filename)
+    tf2 = NamedTemporaryFile()
+    Config.backup(tf2.filename)
+
+    actual_exported_toml_2 = tf2.read().strip()
+    assert actual_exported_toml_2 == expected_toml_config
+
+
+def test_write_json_configuration_file():
+    expected_json_config = """
+{
+"TAIPY": {},
+"unique_section_name": {
+"attribute": "my_attribute",
+"prop": "my_prop",
+"prop_int": "1:int",
+"prop_bool": "False:bool",
+"prop_list": [
+"p1",
+"1991-01-01T00:00:00:datetime",
+"1d0h0m0s:timedelta"
+],
+"prop_scope": "SCENARIO:SCOPE",
+"prop_freq": "QUARTERLY:FREQUENCY"
+},
+"section_name": {
+"default": {
+"attribute": "default_attribute",
+"prop": "default_prop",
+"prop_int": "0:int"
+},
+"my_id": {
+"attribute": "my_attribute",
+"prop": "default_prop",
+"prop_int": "1:int",
+"prop_bool": "False:bool",
+"prop_list": [
+"unique_section_name:SECTION"
+],
+"prop_scope": "SCENARIO",
+"baz": "ENV[QUX]"
+}
+}
+}
+    """.strip()
+    tf = NamedTemporaryFile()
+    Config._serializer = _JsonSerializer()
+
+    unique_section = Config.configure_unique_section_for_tests(
+        attribute="my_attribute",
+        prop="my_prop",
+        prop_int=1,
+        prop_bool=False,
+        prop_list=["p1", datetime.datetime(1991, 1, 1), datetime.timedelta(days=1)],
+        prop_scope=Scope.SCENARIO,
+        prop_freq=Frequency.QUARTERLY,
+    )
+    Config.configure_section_for_tests(
+        "my_id",
+        "my_attribute",
+        prop_int=1,
+        prop_bool=False,
+        prop_list=[unique_section],
+        prop_scope="SCENARIO",
+        baz="ENV[QUX]",
+    )
+    Config.backup(tf.filename)
+    actual_config = tf.read()
+    assert actual_config == expected_json_config
+
+
+def test_read_json_configuration_file():
+    json_config = """
+{
+"TAIPY": {
+"root_folder": "./taipy/",
+"storage_folder": ".data/",
+"repository_type": "filesystem"
+},
+"unique_section_name": {
+"attribute": "my_attribute",
+"prop": "my_prop",
+"prop_int": "1:int",
+"prop_bool": "False:bool",
+"prop_list": [
+"p1",
+"1991-01-01T00:00:00:datetime",
+"1d0h0m0s:timedelta"
+],
+"prop_scope": "SCENARIO:SCOPE",
+"prop_freq": "QUARTERLY:FREQUENCY"
+},
+"section_name": {
+"default": {
+"attribute": "default_attribute",
+"prop": "default_prop",
+"prop_int": "0:int"
+},
+"my_id": {
+"attribute": "my_attribute",
+"prop": "default_prop",
+"prop_int": "1:int",
+"prop_bool": "False:bool",
+"prop_list": [
+"unique_section_name"
+],
+"prop_scope": "SCENARIO"
+}
+}
+}
+    """.strip()
+    Config._serializer = _JsonSerializer()
+    tf = NamedTemporaryFile(json_config)
+
+    Config.override(tf.filename)
+
+    assert Config.unique_sections is not None
+    assert Config.unique_sections[UniqueSectionForTest.name] is not None
+    assert Config.unique_sections[UniqueSectionForTest.name].attribute == "my_attribute"
+    assert Config.unique_sections[UniqueSectionForTest.name].prop == "my_prop"
+    assert Config.unique_sections[UniqueSectionForTest.name].prop_int == 1
+    assert Config.unique_sections[UniqueSectionForTest.name].prop_bool is False
+    assert Config.unique_sections[UniqueSectionForTest.name].prop_list == [
+        "p1",
+        datetime.datetime(1991, 1, 1),
+        datetime.timedelta(days=1),
+    ]
+    assert Config.unique_sections[UniqueSectionForTest.name].prop_scope == Scope.SCENARIO
+    assert Config.unique_sections[UniqueSectionForTest.name].prop_freq == Frequency.QUARTERLY
+
+    assert Config.sections is not None
+    assert len(Config.sections) == 1
+    assert Config.sections[SectionForTest.name] is not None
+    assert len(Config.sections[SectionForTest.name]) == 2
+    assert Config.sections[SectionForTest.name]["default"] is not None
+    assert Config.sections[SectionForTest.name]["default"].attribute == "default_attribute"
+    assert Config.sections[SectionForTest.name]["default"].prop == "default_prop"
+    assert Config.sections[SectionForTest.name]["default"].prop_int == 0
+    assert Config.sections[SectionForTest.name]["my_id"] is not None
+    assert Config.sections[SectionForTest.name]["my_id"].attribute == "my_attribute"
+    assert Config.sections[SectionForTest.name]["my_id"].prop == "default_prop"
+    assert Config.sections[SectionForTest.name]["my_id"].prop_int == 1
+    assert Config.sections[SectionForTest.name]["my_id"].prop_bool is False
+    assert Config.sections[SectionForTest.name]["my_id"].prop_list == ["unique_section_name"]
+
+    tf2 = NamedTemporaryFile()
+    Config.backup(tf2.filename)
+    actual_config_2 = tf2.read().strip()
+    assert actual_config_2 == json_config
+
+
+def test_read_write_json_configuration_file_with_function_and_class():
+    expected_json_config = """
+{
+"TAIPY": {},
+"unique_section_name": {
+"attribute": "my_attribute",
+"prop": "my_prop",
+"prop_list": [
+"tests.config.test_section_serialization.CustomEncoder:class",
+"tests.config.test_section_serialization.CustomDecoder:class"
+]
+},
+"section_name": {
+"default": {
+"attribute": "default_attribute",
+"prop": "default_prop",
+"prop_int": "0:int"
+},
+"my_id": {
+"attribute": "my_attribute",
+"prop": "default_prop",
+"prop_int": "0:int",
+"prop_fct_list": [
+"tests.config.test_section_serialization.add:function"
+],
+"prop_class_list": [
+"tests.config.test_section_serialization.CustomClass:class"
+]
+},
+"my_id_2": {
+"attribute": "my_attribute_2",
+"prop": "default_prop",
+"prop_int": "0:int",
+"prop_fct_list": [
+"builtins.print:function",
+"builtins.pow:function"
+]
+}
+}
+}
+    """.strip()
+
+    Config._serializer = _JsonSerializer()
+    tf = NamedTemporaryFile()
+    Config.configure_unique_section_for_tests(
+        attribute="my_attribute",
+        prop="my_prop",
+        prop_list=[CustomEncoder, CustomDecoder],
+    )
+    Config.configure_section_for_tests(
+        "my_id",
+        "my_attribute",
+        prop_fct_list=[add],
+        prop_class_list=[CustomClass],
+    )
+    Config.configure_section_for_tests(
+        "my_id_2",
+        "my_attribute_2",
+        prop_fct_list=[print, pow],
+    )
+
+    Config.backup(tf.filename)
+    actual_exported_json = tf.read().strip()
+    assert actual_exported_json == expected_json_config
+
+    Config.override(tf.filename)
+    tf2 = NamedTemporaryFile()
+    Config.backup(tf2.filename)
+
+    actual_exported_json_2 = tf2.read().strip()
+    assert actual_exported_json_2 == expected_json_config

+ 0 - 0
src/taipy/config/tests/config/utils/__init__.py


+ 18 - 0
src/taipy/config/tests/config/utils/checker_for_tests.py

@@ -0,0 +1,18 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from src.taipy.config import IssueCollector
+from src.taipy.config.checker._checkers._config_checker import _ConfigChecker
+
+
+class CheckerForTest(_ConfigChecker):
+    def _check(self) -> IssueCollector:
+        return self._collector

+ 28 - 0
src/taipy/config/tests/config/utils/named_temporary_file.py

@@ -0,0 +1,28 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+import os
+import tempfile
+
+
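+# Minimal temp-file helper for the config tests: the file is created with delete=False so
+# it survives the context manager and can be re-read after Config.backup()/override();
+# it is removed from disk when the instance is garbage collected.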
+class NamedTemporaryFile:
+    def __init__(self, content=None):
+        with tempfile.NamedTemporaryFile("w", delete=False) as fd:
+            if content:
+                fd.write(content)
+            self.filename = fd.name
+
+    def read(self):
+        with open(self.filename, "r") as fp:
+            return fp.read()
+
+    def __del__(self):
+        os.unlink(self.filename)

+ 69 - 0
src/taipy/config/tests/config/utils/section_for_tests.py

@@ -0,0 +1,69 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from copy import copy
+from typing import Any, Dict, Optional
+
+from src.taipy.config import Config, Section
+from src.taipy.config._config import _Config
+from src.taipy.config.common._config_blocker import _ConfigBlocker
+
+
+class SectionForTest(Section):
+    name = "section_name"
+    _MY_ATTRIBUTE_KEY = "attribute"
+
+    def __init__(self, id: str, attribute: Any = None, **properties):
+        self._attribute = attribute
+        super().__init__(id, **properties)
+
+    def __copy__(self):
+        return SectionForTest(self.id, self._attribute, **copy(self._properties))
+
+    @property
+    def attribute(self):
+        return self._replace_templates(self._attribute)
+
+    @attribute.setter  # type: ignore
+    @_ConfigBlocker._check()
+    def attribute(self, val):
+        self._attribute = val
+
+    def _clean(self):
+        self._attribute = None
+        self._properties.clear()
+
+    def _to_dict(self):
+        as_dict = {}
+        if self._attribute is not None:
+            as_dict[self._MY_ATTRIBUTE_KEY] = self._attribute
+        as_dict.update(self._properties)
+        return as_dict
+
+    @classmethod
+    def _from_dict(cls, as_dict: Dict[str, Any], id: str, config: Optional[_Config] = None):
+        as_dict.pop(cls._ID_KEY, id)
+        attribute = as_dict.pop(cls._MY_ATTRIBUTE_KEY, None)
+        return SectionForTest(id=id, attribute=attribute, **as_dict)
+
+    def _update(self, as_dict: Dict[str, Any], default_section=None):
+        self._attribute = as_dict.pop(self._MY_ATTRIBUTE_KEY, self._attribute)
+        if self._attribute is None and default_section:
+            self._attribute = default_section._attribute
+        self._properties.update(as_dict)
+        if default_section:
+            self._properties = {**default_section.properties, **self._properties}
+
+    @staticmethod
+    def _configure(id: str, attribute: str, **properties):
+        section = SectionForTest(id, attribute, **properties)
+        Config._register(section)
+        return Config.sections[SectionForTest.name][id]

+ 98 - 0
src/taipy/config/tests/config/utils/section_of_sections_list_for_tests.py

@@ -0,0 +1,98 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from copy import copy
+from typing import Any, Dict, List, Optional
+
+from src.taipy.config import Config, Section
+from src.taipy.config._config import _Config
+from src.taipy.config.common._config_blocker import _ConfigBlocker
+
+from .section_for_tests import SectionForTest
+
+
+class SectionOfSectionsListForTest(Section):
+    name = "list_section_name"
+    _MY_ATTRIBUTE_KEY = "attribute"
+    _SECTIONS_LIST_KEY = "sections_list"
+
+    def __init__(self, id: str, attribute: Any = None, sections_list: List = None, **properties):
+        self._attribute = attribute
+        self._sections_list = sections_list if sections_list else []
+        super().__init__(id, **properties)
+
+    def __copy__(self):
+        return SectionOfSectionsListForTest(
+            self.id, self._attribute, copy(self._sections_list), **copy(self._properties)
+        )
+
+    @property
+    def attribute(self):
+        return self._replace_templates(self._attribute)
+
+    @attribute.setter  # type: ignore
+    @_ConfigBlocker._check()
+    def attribute(self, val):
+        self._attribute = val
+
+    @property
+    def sections_list(self):
+        return list(self._sections_list)
+
+    @sections_list.setter  # type: ignore
+    @_ConfigBlocker._check()
+    def sections_list(self, val):
+        self._sections_list = val
+
+    def _clean(self):
+        self._attribute = None
+        self._sections_list = []
+        self._properties.clear()
+
+    def _to_dict(self):
+        as_dict = {}
+        if self._attribute is not None:
+            as_dict[self._MY_ATTRIBUTE_KEY] = self._attribute
+        if self._sections_list:
+            as_dict[self._SECTIONS_LIST_KEY] = self._sections_list
+        as_dict.update(self._properties)
+        return as_dict
+
+    @classmethod
+    def _from_dict(cls, as_dict: Dict[str, Any], id: str, config: Optional[_Config] = None):
+        as_dict.pop(cls._ID_KEY, id)
+        attribute = as_dict.pop(cls._MY_ATTRIBUTE_KEY, None)
+        section_configs = config._sections.get(SectionForTest.name, None) or []  # type: ignore
+        sections_list = []
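+        # Each referenced id is resolved to an already-registered SectionForTest config when
+        # one exists; otherwise the raw id string is kept as-is.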
+        if inputs_as_str := as_dict.pop(cls._SECTIONS_LIST_KEY, None):
+            for section_id in inputs_as_str:
+                if section_id in section_configs:
+                    sections_list.append(section_configs[section_id])
+                else:
+                    sections_list.append(section_id)
+        return SectionOfSectionsListForTest(id=id, attribute=attribute, sections_list=sections_list, **as_dict)
+
+    def _update(self, as_dict: Dict[str, Any], default_section=None):
+        self._attribute = as_dict.pop(self._MY_ATTRIBUTE_KEY, self._attribute)
+        if self._attribute is None and default_section:
+            self._attribute = default_section._attribute
+        self._sections_list = as_dict.pop(self._SECTIONS_LIST_KEY, self._sections_list)
+        if self._sections_list is None and default_section:
+            self._sections_list = default_section._sections_list
+        self._properties.update(as_dict)
+        if default_section:
+            self._properties = {**default_section.properties, **self._properties}
+
+    @staticmethod
+    def _configure(id: str, attribute: str, sections_list: List = None, **properties):
+        section = SectionOfSectionsListForTest(id, attribute, sections_list, **properties)
+        Config._register(section)
+        return Config.sections[SectionOfSectionsListForTest.name][id]

+ 70 - 0
src/taipy/config/tests/config/utils/unique_section_for_tests.py

@@ -0,0 +1,70 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
+from copy import copy
+from typing import Any, Dict, Optional
+
+from src.taipy.config import Config
+from src.taipy.config._config import _Config
+from src.taipy.config.common._config_blocker import _ConfigBlocker
+from src.taipy.config.unique_section import UniqueSection
+
+
+class UniqueSectionForTest(UniqueSection):
+    name = "unique_section_name"
+    _MY_ATTRIBUTE_KEY = "attribute"
+
+    def __init__(self, attribute: str = None, **properties):
+        self._attribute = attribute
+        super().__init__(**properties)
+
+    def __copy__(self):
+        return UniqueSectionForTest(self._attribute, **copy(self._properties))
+
+    @property
+    def attribute(self):
+        return self._replace_templates(self._attribute)
+
+    @attribute.setter  # type: ignore
+    @_ConfigBlocker._check()
+    def attribute(self, val):
+        self._attribute = val
+
+    def _clean(self):
+        self._attribute = None
+        self._properties.clear()
+
+    def _to_dict(self):
+        as_dict = {}
+        if self._attribute is not None:
+            as_dict[self._MY_ATTRIBUTE_KEY] = self._attribute
+        as_dict.update(self._properties)
+        return as_dict
+
+    @classmethod
+    def _from_dict(cls, as_dict: Dict[str, Any], id=None, config: Optional[_Config] = None):
+        as_dict.pop(cls._ID_KEY, None)
+        attribute = as_dict.pop(cls._MY_ATTRIBUTE_KEY, None)
+        return UniqueSectionForTest(attribute=attribute, **as_dict)
+
+    def _update(self, as_dict: Dict[str, Any], default_section=None):
+        self._attribute = as_dict.pop(self._MY_ATTRIBUTE_KEY, self._attribute)
+        if self._attribute is None and default_section:
+            self._attribute = default_section._attribute
+        self._properties.update(as_dict)
+        if default_section:
+            self._properties = {**default_section.properties, **self._properties}
+
+    @staticmethod
+    def _configure(attribute: str, **properties):
+        section = UniqueSectionForTest(attribute, **properties)
+        Config._register(section)
+        return Config.unique_sections[UniqueSectionForTest.name]

+ 10 - 0
src/taipy/config/tests/logger/__init__.py

@@ -0,0 +1,10 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.

+ 27 - 0
src/taipy/config/tests/logger/logger.conf

@@ -0,0 +1,27 @@
+[loggers]
+keys=root,Taipy
+
+[handlers]
+keys=consoleHandler
+
+[formatters]
+keys=simpleFormatter
+
+[logger_root]
+level=DEBUG
+handlers=consoleHandler
+
+[logger_Taipy]
+level=DEBUG
+handlers=consoleHandler
+qualname=Taipy
+propagate=0
+
+[handler_consoleHandler]
+class=StreamHandler
+level=DEBUG
+formatter=simpleFormatter
+args=(sys.stdout,)
+
+[formatter_simpleFormatter]
+format=%(asctime)s - %(name)s - %(levelname)s - %(message)s

Some files were not shown because too many files changed in this diff.