trgiangdo 1 год назад
Родитель
Commit
19ff2d4a0a
100 измененных файлов с 222 добавлено и 171 удалено
  1. 1 0
      doc/gui/examples/charts/advanced-selection.py
  2. 1 0
      doc/gui/examples/charts/bar-facing.py
  3. 1 0
      doc/gui/examples/charts/bar-multiple.py
  4. 1 0
      doc/gui/examples/charts/bar-simple.py
  5. 1 0
      doc/gui/examples/charts/bar-stacked.py
  6. 1 0
      doc/gui/examples/charts/basics-timeline.py
  7. 1 0
      doc/gui/examples/charts/candlestick-simple.py
  8. 1 0
      doc/gui/examples/charts/candlestick-styling.py
  9. 1 0
      doc/gui/examples/charts/continuous-error-multiple.py
  10. 1 0
      doc/gui/examples/charts/heatmap-unequal-cell-sizes.py
  11. 1 0
      doc/gui/examples/charts/line-style.py
  12. 1 0
      doc/gui/examples/charts/line-texts.py
  13. 1 0
      doc/gui/examples/charts/map-bubbles.py
  14. 1 0
      doc/gui/examples/charts/polar-tick-texts.py
  15. 1 0
      doc/gui/examples/charts/scatter-classification.py
  16. 1 1
      doc/gui/examples/charts/scatter-regression.py
  17. 1 0
      doc/gui/examples/charts/scatter-styling.py
  18. 12 0
      doc/gui/extension/main.py
  19. 7 7
      taipy/__init__.py
  20. 1 1
      taipy/_entrypoint.py
  21. 1 1
      taipy/config/_serializer/_json_serializer.py
  22. 1 1
      taipy/config/_serializer/_toml_serializer.py
  23. 1 1
      taipy/config/checker/_checkers/_config_checker.py
  24. 7 7
      taipy/config/common/_template_handler.py
  25. 1 1
      taipy/config/global_app/global_app_config.py
  26. 1 1
      taipy/config/stubs/generate_pyi.py
  27. 0 1
      taipy/config/unique_section.py
  28. 1 1
      taipy/core/_entity/_dag.py
  29. 7 7
      taipy/core/_entity/_labeled.py
  30. 4 4
      taipy/core/_entity/_migrate/_migrate_fs.py
  31. 3 3
      taipy/core/_entity/_migrate/_migrate_mongo.py
  32. 1 1
      taipy/core/_entity/_migrate/_migrate_sql.py
  33. 1 1
      taipy/core/_entity/_properties.py
  34. 1 1
      taipy/core/_entity/submittable.py
  35. 0 1
      taipy/core/_manager/_manager.py
  36. 0 1
      taipy/core/_orchestrator/_orchestrator.py
  37. 1 1
      taipy/core/_repository/_decoder.py
  38. 4 7
      taipy/core/_repository/_filesystem_repository.py
  39. 1 1
      taipy/core/_repository/db/_sql_connection.py
  40. 2 2
      taipy/core/_version/_cli/_version_cli.py
  41. 1 1
      taipy/core/_version/_version_fs_repository.py
  42. 2 2
      taipy/core/_version/_version_manager.py
  43. 1 1
      taipy/core/config/task_config.py
  44. 1 1
      taipy/core/cycle/_cycle_manager.py
  45. 1 1
      taipy/core/data/__init__.py
  46. 1 1
      taipy/core/data/_abstract_sql.py
  47. 1 1
      taipy/core/data/_data_manager.py
  48. 3 3
      taipy/core/data/_filter.py
  49. 4 4
      taipy/core/data/aws_s3.py
  50. 1 1
      taipy/core/data/data_node.py
  51. 4 3
      taipy/core/data/excel.py
  52. 1 1
      taipy/core/data/json.py
  53. 2 2
      taipy/core/data/mongo.py
  54. 1 0
      taipy/core/data/parquet.py
  55. 1 0
      taipy/core/data/pickle.py
  56. 1 0
      taipy/core/data/sql.py
  57. 1 1
      taipy/core/job/_job_converter.py
  58. 6 6
      taipy/core/scenario/scenario.py
  59. 3 3
      taipy/core/sequence/_sequence_manager.py
  60. 2 2
      taipy/gui/_renderers/_markdown/blocproc.py
  61. 11 4
      taipy/gui/_renderers/builder.py
  62. 1 1
      taipy/gui/builder/_element.py
  63. 2 1
      taipy/gui/config.py
  64. 1 1
      taipy/gui/extension/library.py
  65. 9 6
      taipy/gui/gui.py
  66. 7 2
      taipy/gui/gui_actions.py
  67. 1 1
      taipy/gui/page.py
  68. 5 4
      taipy/gui/server.py
  69. 0 1
      taipy/gui/state.py
  70. 2 2
      taipy/gui/utils/chart_config_builder.py
  71. 0 1
      taipy/gui/utils/filename.py
  72. 1 2
      taipy/gui/utils/get_module_name.py
  73. 1 1
      taipy/gui/utils/proxy.py
  74. 2 2
      taipy/gui/utils/types.py
  75. 13 2
      taipy/gui_core/_adapters.py
  76. 5 8
      taipy/gui_core/_context.py
  77. 1 0
      taipy/rest/api/error_handler.py
  78. 1 0
      taipy/rest/api/resources/job.py
  79. 1 0
      taipy/rest/api/resources/scenario.py
  80. 1 0
      taipy/rest/api/resources/task.py
  81. 1 0
      taipy/rest/api/views.py
  82. 1 0
      taipy/templates/default/{{cookiecutter.__root_folder_name}}/configuration/config.py
  83. 0 3
      tests/config/checker/checkers/test_checker.py
  84. 0 1
      tests/config/test_config_comparator.py
  85. 2 2
      tests/config/test_override_config.py
  86. 1 1
      tests/config/test_section_serialization.py
  87. 1 0
      tests/core/_entity/test_labelled.py
  88. 6 4
      tests/core/_entity/test_migrate_cli.py
  89. 9 10
      tests/core/_orchestrator/test_orchestrator.py
  90. 1 0
      tests/core/_orchestrator/test_orchestrator_factory.py
  91. 7 5
      tests/core/common/test_retry.py
  92. 5 5
      tests/core/common/test_warn_if_inputs_not_ready.py
  93. 1 0
      tests/core/config/checkers/test_job_config_checker.py
  94. 1 0
      tests/core/config/checkers/test_migration_config_checker.py
  95. 1 1
      tests/core/config/test_config_serialization.py
  96. 3 3
      tests/core/config/test_core_section.py
  97. 1 0
      tests/core/config/test_core_version.py
  98. 1 0
      tests/core/config/test_data_node_config.py
  99. 0 2
      tests/core/config/test_job_config.py
  100. 2 8
      tests/core/data/test_aws_s3_data_node.py

+ 1 - 0
doc/gui/examples/charts/advanced-selection.py

@@ -16,6 +16,7 @@
 import random
 
 import numpy
+
 from taipy.gui import Gui
 
 # x = [0..20]

+ 1 - 0
doc/gui/examples/charts/bar-facing.py

@@ -15,6 +15,7 @@
 # -----------------------------------------------------------------------------------------
 # Face-to-face bar charts example
 import numpy
+
 from taipy.gui import Gui
 
 n_years = 10

+ 1 - 0
doc/gui/examples/charts/bar-multiple.py

@@ -15,6 +15,7 @@
 # -----------------------------------------------------------------------------------------
 # Two data sets as a bar chart
 import pandas
+
 from taipy.gui import Gui
 
 # Source https://en.wikipedia.org/wiki/List_of_United_States_presidential_elections_by_popular_vote_margin

+ 1 - 0
doc/gui/examples/charts/bar-simple.py

@@ -14,6 +14,7 @@
 #     python <script>
 # -----------------------------------------------------------------------------------------
 import pandas
+
 from taipy.gui import Gui
 
 # Source https://en.wikipedia.org/wiki/List_of_United_States_presidential_elections_by_popular_vote_margin

+ 1 - 0
doc/gui/examples/charts/bar-stacked.py

@@ -14,6 +14,7 @@
 #     python <script>
 # -----------------------------------------------------------------------------------------
 import pandas
+
 from taipy.gui import Gui
 
 # Source https://en.wikipedia.org/wiki/List_of_United_States_presidential_elections_by_popular_vote_margin

+ 1 - 0
doc/gui/examples/charts/basics-timeline.py

@@ -15,6 +15,7 @@
 # -----------------------------------------------------------------------------------------
 import numpy
 import pandas
+
 from taipy.gui import Gui
 
 # Generate a random value for every hour on a given day

+ 1 - 0
doc/gui/examples/charts/candlestick-simple.py

@@ -15,6 +15,7 @@
 # You may need to install the yfinance package as well.
 # -----------------------------------------------------------------------------------------
 import yfinance
+
 from taipy import Gui
 
 # Extraction of a month of stock data for AAPL using the

+ 1 - 0
doc/gui/examples/charts/candlestick-styling.py

@@ -15,6 +15,7 @@
 # You may need to install the yfinance package as well.
 # -----------------------------------------------------------------------------------------
 import yfinance
+
 from taipy.gui import Gui
 
 # Extraction of a few days of stock historical data for AAPL using

+ 1 - 0
doc/gui/examples/charts/continuous-error-multiple.py

@@ -16,6 +16,7 @@
 import datetime
 
 import dateutil.relativedelta
+
 from taipy.gui import Gui
 
 # Data is collected from January 1st, 2010, every month

+ 1 - 0
doc/gui/examples/charts/heatmap-unequal-cell-sizes.py

@@ -16,6 +16,7 @@
 from itertools import accumulate
 
 import numpy as np
+
 from taipy.gui import Gui
 
 grid_size = 10

+ 1 - 0
doc/gui/examples/charts/line-style.py

@@ -14,6 +14,7 @@
 #     python <script>
 # -----------------------------------------------------------------------------------------
 import pandas
+
 from taipy.gui import Gui
 
 dates = pandas.date_range("2023-01-01", periods=365, freq="D")

+ 1 - 0
doc/gui/examples/charts/line-texts.py

@@ -15,6 +15,7 @@
 # -----------------------------------------------------------------------------------------
 import numpy
 import pandas
+
 from taipy.gui import Gui
 
 dates = pandas.date_range("2023-01-01", periods=365, freq="D")

+ 1 - 0
doc/gui/examples/charts/map-bubbles.py

@@ -15,6 +15,7 @@
 # -----------------------------------------------------------------------------------------
 import numpy
 import pandas
+
 from taipy.gui import Gui
 
 # Largest cities: name, location and population

+ 1 - 0
doc/gui/examples/charts/polar-tick-texts.py

@@ -16,6 +16,7 @@
 from datetime import datetime
 
 import numpy
+
 from taipy.gui import Gui
 
 

+ 1 - 0
doc/gui/examples/charts/scatter-classification.py

@@ -17,6 +17,7 @@
 import numpy
 import pandas
 from sklearn.datasets import make_classification
+
 from taipy.gui import Gui
 
 # Let scikit-learn generate a random 2-class classification problem

+ 1 - 1
doc/gui/examples/charts/scatter-regression.py

@@ -14,10 +14,10 @@
 #     python <script>
 # You may need to install the scikit-learn package as well.
 # -----------------------------------------------------------------------------------------
-from os.path import exists
 
 from sklearn.datasets import make_regression
 from sklearn.linear_model import LinearRegression
+
 from taipy.gui import Gui
 
 # Let scikit-learn generate a random regression problem

+ 1 - 0
doc/gui/examples/charts/scatter-styling.py

@@ -17,6 +17,7 @@
 import numpy as np
 import pandas as pd
 from sklearn.datasets import make_classification
+
 from taipy.gui import Gui
 
 # Let scikit-learn generate a random 2-class classification problem

+ 12 - 0
doc/gui/extension/main.py

@@ -1,7 +1,19 @@
+# Copyright 2023 Avaiga Private Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations under the License.
+
 import random
 import string
 
 from example_library import ExampleLibrary
+
 from taipy.gui import Gui
 
 # Initial value

+ 7 - 7
taipy/__init__.py

@@ -13,22 +13,22 @@ from importlib.util import find_spec
 
 if find_spec("taipy"):
     if find_spec("taipy.config"):
-        from taipy.config._init import *  # type: ignore
+        from taipy.config._init import *  # noqa: F403
 
     if find_spec("taipy.gui"):
-        from taipy.gui._init import *  # type: ignore
+        from taipy.gui._init import *  # noqa: F403
 
     if find_spec("taipy.core"):
-        from taipy.core._init import *  # type: ignore
+        from taipy.core._init import *  # noqa: F403
 
     if find_spec("taipy.rest"):
-        from taipy.rest._init import *  # type: ignore
+        from taipy.rest._init import *  # noqa: F403
 
     if find_spec("taipy.gui_core"):
-        from taipy.gui_core._init import *  # type: ignore
+        from taipy.gui_core._init import *  # noqa: F403
 
     if find_spec("taipy.enterprise"):
-        from taipy.enterprise._init import *  # type: ignore
+        from taipy.enterprise._init import *  # noqa: F403
 
     if find_spec("taipy._run"):
-        from taipy._run import _run as run  # type: ignore
+        from taipy._run import _run as run  # noqa: F401, F403

+ 1 - 1
taipy/_entrypoint.py

@@ -41,7 +41,7 @@ def _entrypoint():
 
     args = _CLI._parse()
     if args.version:
-        print(f"Taipy {_get_version()}")
+        print(f"Taipy {_get_version()}")  # noqa: T201
         sys.exit(0)
 
     _RunCLI.parse_arguments()

+ 1 - 1
taipy/config/_serializer/_json_serializer.py

@@ -32,7 +32,7 @@ class _JsonSerializer(_BaseSerializer):
             return cls._from_dict(config_as_dict)
         except json.JSONDecodeError as e:
             error_msg = f"Can not load configuration {e}"
-            raise LoadingError(error_msg)
+            raise LoadingError(error_msg) from None
 
     @classmethod
     def _serialize(cls, configuration: _Config) -> str:

+ 1 - 1
taipy/config/_serializer/_toml_serializer.py

@@ -31,7 +31,7 @@ class _TomlSerializer(_BaseSerializer):
             return cls._from_dict(config_as_dict)
         except toml.TomlDecodeError as e:
             error_msg = f"Can not load configuration {e}"
-            raise LoadingError(error_msg)
+            raise LoadingError(error_msg) from None
 
     @classmethod
     def _serialize(cls, configuration: _Config) -> str:

+ 1 - 1
taipy/config/checker/_checkers/_config_checker.py

@@ -54,7 +54,7 @@ class _ConfigChecker:
         else:
             if not (
                 (isinstance(config_value, List) or isinstance(config_value, Set))
-                and all(map(lambda x: isinstance(x, child_config_class), config_value))
+                and all(isinstance(x, child_config_class) for x in config_value)
             ):
                 self._error(
                     config_key,

+ 7 - 7
taipy/config/common/_template_handler.py

@@ -84,21 +84,21 @@ class _TemplateHandler:
         try:
             return int(val)
         except ValueError:
-            raise InconsistentEnvVariableError(f"{val} is not an integer.")
+            raise InconsistentEnvVariableError(f"{val} is not an integer.") from None
 
     @staticmethod
     def _to_float(val: str) -> float:
         try:
             return float(val)
         except ValueError:
-            raise InconsistentEnvVariableError(f"{val} is not a float.")
+            raise InconsistentEnvVariableError(f"{val} is not a float.") from None
 
     @staticmethod
     def _to_datetime(val: str) -> datetime:
         try:
             return datetime.fromisoformat(val)
         except ValueError:
-            raise InconsistentEnvVariableError(f"{val} is not a valid datetime.")
+            raise InconsistentEnvVariableError(f"{val} is not a valid datetime.") from None
 
     @staticmethod
     def _to_timedelta(val: str) -> timedelta:
@@ -125,14 +125,14 @@ class _TemplateHandler:
         try:
             return Scope[str.upper(val)]
         except Exception:
-            raise InconsistentEnvVariableError(f"{val} is not a valid scope.")
+            raise InconsistentEnvVariableError(f"{val} is not a valid scope.") from None
 
     @staticmethod
     def _to_frequency(val: str) -> Frequency:
         try:
             return Frequency[str.upper(val)]
         except Exception:
-            raise InconsistentEnvVariableError(f"{val} is not a valid frequency.")
+            raise InconsistentEnvVariableError(f"{val} is not a valid frequency.") from None
 
     @staticmethod
     def _to_function(val: str):
@@ -141,11 +141,11 @@ class _TemplateHandler:
             module = import_module(module_name)
             return attrgetter(fct_name)(module)
         except Exception:
-            raise InconsistentEnvVariableError(f"{val} is not a valid function.")
+            raise InconsistentEnvVariableError(f"{val} is not a valid function.") from None
 
     @staticmethod
     def _to_class(val: str):
         try:
             return locate(val)
         except Exception:
-            raise InconsistentEnvVariableError(f"{val} is not a valid class.")
+            raise InconsistentEnvVariableError(f"{val} is not a valid class.") from None

+ 1 - 1
taipy/config/global_app/global_app_config.py

@@ -11,7 +11,7 @@
 
 from __future__ import annotations
 
-from typing import Any, Dict, Optional, Union
+from typing import Any, Dict, Optional
 
 from ..common._config_blocker import _ConfigBlocker
 from ..common._template_handler import _TemplateHandler as _tpl

+ 1 - 1
taipy/config/stubs/generate_pyi.py

@@ -95,7 +95,7 @@ def _build_entity_config_pyi(base_pyi, filename, entity_map):
             func = __add_docstring(func, lines, end_line) + "\n"
             base_pyi += func
         except Exception:
-            print(f"key={k}")
+            print(f"key={k}")  # noqa: T201
             raise
 
     return base_pyi

+ 0 - 1
taipy/config/unique_section.py

@@ -11,7 +11,6 @@
 
 from abc import ABC
 
-from .common._validate_id import _validate_id
 from .section import Section
 
 

+ 1 - 1
taipy/core/_entity/_dag.py

@@ -31,7 +31,7 @@ class _Edge:
 
 class _DAG:
     def __init__(self, dag: nx.DiGraph):
-        self._sorted_nodes = list(nodes for nodes in nx.topological_generations(dag))
+        self._sorted_nodes = [nodes for nodes in nx.topological_generations(dag)]
         self._length, self._width = self.__compute_size()
         self._grid_length, self._grid_width = self.__compute_grid_size()
         self._nodes = self.__compute_nodes()

+ 7 - 7
taipy/core/_entity/_labeled.py

@@ -43,7 +43,7 @@ class _Labeled:
         ls = []
         if not simple:
             if owner_id := self._get_owner_id():
-                if getattr(self, "id") != owner_id:
+                if getattr(self, "id") != owner_id:  # noqa: B009
                     from ... import core as tp
 
                     owner = tp.get(owner_id)
@@ -53,24 +53,24 @@ class _Labeled:
 
     def _get_explicit_label(self) -> Optional[str]:
         if hasattr(self, "_properties"):
-            return getattr(self, "_properties").get("label")
+            return getattr(self, "_properties").get("label")  # noqa: B009
         return None
 
     def _get_owner_id(self) -> Optional[str]:
         if hasattr(self, "owner_id"):
-            return getattr(self, "owner_id")
+            return getattr(self, "owner_id")  # noqa: B009
         return None
 
     def _get_name(self) -> Optional[str]:
         if hasattr(self, "name"):
-            return getattr(self, "name")
+            return getattr(self, "name")  # noqa: B009
         if hasattr(self, "_properties"):
-            return getattr(self, "_properties").get("name")
+            return getattr(self, "_properties").get("name")  # noqa: B009
         return None
 
     def _get_config_id(self) -> Optional[str]:
         if hasattr(self, "config_id"):
-            return getattr(self, "config_id")
+            return getattr(self, "config_id")  # noqa: B009
         return None
 
     def _generate_entity_label(self) -> str:
@@ -78,4 +78,4 @@ class _Labeled:
             return name
         if config_id := self._get_config_id():
             return config_id
-        return getattr(self, "id")
+        return getattr(self, "id")  # noqa: B009

+ 4 - 4
taipy/core/_entity/_migrate/_migrate_fs.py

@@ -24,16 +24,16 @@ __logger = _TaipyLogger._get_logger()
 def _load_all_entities_from_fs(root: str) -> Dict:
     # run through all files in the data folder and load them
     entities = {}
-    for root, dirs, files in os.walk(root):
+    for dirpath, _, files in os.walk(root):
         for file in files:
             if file.endswith(".json"):
-                with open(os.path.join(root, file)) as f:
+                with open(os.path.join(dirpath, file)) as f:
                     _id = file.split(".")[0]
-                    if "version" in root:
+                    if "version" in dirpath:
                         _id = f"VERSION_{_id}"
                     entities[_id] = {
                         "data": json.load(f),
-                        "path": os.path.join(root, file),
+                        "path": os.path.join(dirpath, file),
                     }
     return entities
 

+ 3 - 3
taipy/core/_entity/_migrate/_migrate_mongo.py

@@ -16,6 +16,7 @@ from typing import Dict
 
 import bson
 import pymongo
+
 from taipy.logger._taipy_logger import _TaipyLogger
 
 from ._utils import _migrate
@@ -82,9 +83,8 @@ def __write_entities_to_mongo(
     client = _connect_mongodb(hostname, port, user, password)
     for collection in NEW_COLLECTIONS:
         db = client[DATABASE_NAME]
-        db[collection].insert_many(
-            [entity["data"] for entity in _entities.values() if collection in entity["data"]["id"]]
-        )
+        if insert_data := [entity["data"] for entity in _entities.values() if collection in entity["data"]["id"]]:
+            db[collection].insert_many(insert_data)
 
 
 def _backup_mongo_entities(

+ 1 - 1
taipy/core/_entity/_migrate/_migrate_sql.py

@@ -136,7 +136,7 @@ def __write_entities_to_sql(_entities: Dict, _versions: Dict, db_file: str):
         elif "CYCLE" in k:
             __insert_cycle(entity["data"], conn)
 
-    for k, version in _versions.items():
+    for _, version in _versions.items():
         __insert_version(version, conn)
 
 

+ 1 - 1
taipy/core/_entity/_properties.py

@@ -11,7 +11,7 @@
 
 from collections import UserDict
 
-from ..notification import _ENTITY_TO_EVENT_ENTITY_TYPE, EventOperation, Notifier, _make_event
+from ..notification import EventOperation, Notifier, _make_event
 
 
 class _Properties(UserDict):

+ 1 - 1
taipy/core/_entity/submittable.py

@@ -129,7 +129,7 @@ class Submittable:
         dag = self._build_dag()
         remove = [node for node, degree in dict(dag.in_degree).items() if degree == 0 and isinstance(node, DataNode)]
         dag.remove_nodes_from(remove)
-        return list(nodes for nodes in nx.topological_generations(dag) if (Task in (type(node) for node in nodes)))
+        return [nodes for nodes in nx.topological_generations(dag) if (Task in (type(node) for node in nodes))]
 
     def _add_subscriber(self, callback: Callable, params: Optional[List[Any]] = None):
         params = [] if params is None else params

+ 0 - 1
taipy/core/_manager/_manager.py

@@ -10,7 +10,6 @@
 # specific language governing permissions and limitations under the License.
 
 import pathlib
-from importlib import metadata
 from typing import Dict, Generic, Iterable, List, Optional, TypeVar, Union
 
 from taipy.logger._taipy_logger import _TaipyLogger

+ 0 - 1
taipy/core/_orchestrator/_orchestrator.py

@@ -24,7 +24,6 @@ from ..data._data_manager_factory import _DataManagerFactory
 from ..job._job_manager_factory import _JobManagerFactory
 from ..job.job import Job
 from ..job.job_id import JobId
-from ..scenario.scenario import Scenario
 from ..submission._submission_manager_factory import _SubmissionManagerFactory
 from ..task.task import Task
 from ._abstract_orchestrator import _AbstractOrchestrator

+ 1 - 1
taipy/core/_repository/_decoder.py

@@ -16,7 +16,7 @@ from datetime import datetime, timedelta
 
 class _Decoder(json.JSONDecoder):
     def __init__(self, *args, **kwargs):
-        json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)
+        json.JSONDecoder.__init__(self, *args, **kwargs, object_hook=self.object_hook)
 
     def _str_to_timedelta(self, timedelta_str: str) -> timedelta:
         """

+ 4 - 7
taipy/core/_repository/_filesystem_repository.py

@@ -75,7 +75,7 @@ class _FileSystemRepository(_AbstractRepository[ModelType, Entity]):
         try:
             file_content = self.__read_file(path)
         except (FileNotFoundError, FileCannotBeRead):
-            raise ModelNotFound(str(self.dir_path), entity_id)
+            raise ModelNotFound(str(self.dir_path), entity_id) from None
 
         return self.__file_content_to_entity(file_content)
 
@@ -93,7 +93,7 @@ class _FileSystemRepository(_AbstractRepository[ModelType, Entity]):
         try:
             self.__get_path(entity_id).unlink()
         except FileNotFoundError:
-            raise ModelNotFound(str(self.dir_path), entity_id)
+            raise ModelNotFound(str(self.dir_path), entity_id) from None
 
     def _delete_all(self):
         shutil.rmtree(self.dir_path, ignore_errors=True)
@@ -189,10 +189,7 @@ class _FileSystemRepository(_AbstractRepository[ModelType, Entity]):
     ):
         try:
             files = filter(lambda f: config_id in f.name, self.dir_path.iterdir())
-            entities = map(
-                lambda f: self.__file_content_to_entity(self.__filter_by(f, filters)),
-                files,
-            )
+            entities = (self.__file_content_to_entity(self.__filter_by(f, filters)) for f in files)
             corresponding_entities = filter(
                 lambda e: e is not None and e.config_id == config_id and e.owner_id == owner_id,  # type: ignore
                 entities,
@@ -258,4 +255,4 @@ class _FileSystemRepository(_AbstractRepository[ModelType, Entity]):
                 file_content = f.read()
             return file_content
         except Exception:
-            raise FileCannotBeRead(str(filepath))
+            raise FileCannotBeRead(str(filepath)) from None

+ 1 - 1
taipy/core/_repository/db/_sql_connection.py

@@ -80,7 +80,7 @@ def _build_connection() -> Connection:
     try:
         db_location = properties["db_location"]
     except KeyError:
-        raise MissingRequiredProperty("Missing property db_location.")
+        raise MissingRequiredProperty("Missing property db_location.") from None
 
     return __build_connection(db_location)
 

+ 2 - 2
taipy/core/_version/_cli/_version_cli.py

@@ -71,7 +71,7 @@ class _VersionCLI:
             return
 
         if args.list:
-            print(cls.__list_versions())
+            print(cls.__list_versions())  # noqa: T201
             sys.exit(0)
 
         if args.rename:
@@ -99,7 +99,7 @@ class _VersionCLI:
                 )
                 sys.exit(0)
             except VersionIsNotProductionVersion as e:
-                raise SystemExit(e)
+                raise SystemExit(e) from None
 
         if args.delete:
             if clean_all_entities_by_version(args.delete):

+ 1 - 1
taipy/core/_version/_version_fs_repository.py

@@ -131,4 +131,4 @@ class _VersionFSRepository(_FileSystemRepository, _VersionRepositoryInterface):
                 )
             )
         except FileNotFoundError:
-            raise VersionIsNotProductionVersion(f"Version '{version_number}' is not a production version.")
+            raise VersionIsNotProductionVersion(f"Version '{version_number}' is not a production version.") from None

+ 2 - 2
taipy/core/_version/_version_manager.py

@@ -112,7 +112,7 @@ class _VersionManager(_Manager[_Version]):
             raise SystemExit(
                 f"Please add a new experiment version or run your application with --force option to"
                 f" override the Config of experiment {version_number}."
-            )
+            ) from None
         cls._repository._set_latest_version(version_number)
         return version_number
 
@@ -137,7 +137,7 @@ class _VersionManager(_Manager[_Version]):
                 f"Please add a new production version with migration functions.\n"
                 f"If old entities remain compatible with the new configuration, you can also run your application with"
                 f" --force option to override the production configuration of version {version_number}."
-            )
+            ) from None
         cls._repository._set_production_version(version_number)
         return version_number
 

+ 1 - 1
taipy/core/config/task_config.py

@@ -139,7 +139,7 @@ class TaskConfig(Section):
 
     def _update(self, as_dict, default_section=None):
         function = as_dict.pop(self._FUNCTION, None)
-        if function is not None and type(function) is not str:
+        if function is not None and not isinstance(function, str):
             self.function = function
         self._inputs = as_dict.pop(self._INPUT_KEY, self._inputs)
         if self._inputs is None and default_section:

+ 1 - 1
taipy/core/cycle/_cycle_manager.py

@@ -11,7 +11,7 @@
 
 import calendar
 from datetime import datetime, time, timedelta
-from typing import Callable, Dict, List, Optional
+from typing import Callable, List, Optional
 
 from taipy.config.common.frequency import Frequency
 

+ 1 - 1
taipy/core/data/__init__.py

@@ -9,6 +9,7 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
+from .aws_s3 import S3ObjectDataNode
 from .csv import CSVDataNode
 from .data_node import DataNode
 from .excel import ExcelDataNode
@@ -21,4 +22,3 @@ from .parquet import ParquetDataNode
 from .pickle import PickleDataNode
 from .sql import SQLDataNode
 from .sql_table import SQLTableDataNode
-from .aws_s3 import S3ObjectDataNode

+ 1 - 1
taipy/core/data/_abstract_sql.py

@@ -221,7 +221,7 @@ class _AbstractSQLDataNode(DataNode, _AbstractTabularDataNode):
         custom_class = self.properties[self.__EXPOSED_TYPE_PROPERTY]
         with self._get_engine().connect() as connection:
             query_result = connection.execute(text(self._get_read_query(operators, join_operator)))
-        return list(map(lambda row: custom_class(**row), query_result))
+        return [custom_class(**row) for row in query_result]
 
     def _read_as_numpy(
         self, operators: Optional[Union[List, Tuple]] = None, join_operator=JoinOperator.AND

+ 1 - 1
taipy/core/data/_data_manager.py

@@ -102,7 +102,7 @@ class _DataManager(_Manager[DataNode], _VersionMixin):
                 properties=props,
             )
         except KeyError:
-            raise InvalidDataNodeType(data_node_config.storage_type)
+            raise InvalidDataNodeType(data_node_config.storage_type) from None
 
     @classmethod
     def _get_all(cls, version_number: Optional[str] = None) -> List[DataNode]:

+ 3 - 3
taipy/core/data/_filter.py

@@ -31,9 +31,9 @@ class _FilterDataNode:
     @staticmethod
     def __is_multi_sheet_excel(data) -> bool:
         if isinstance(data, Dict):
-            has_df_children = all([isinstance(e, (pd.DataFrame, modin_pd.DataFrame)) for e in data.values()])
-            has_list_children = all([isinstance(e, List) for e in data.values()])
-            has_np_array_children = all([isinstance(e, np.ndarray) for e in data.values()])
+            has_df_children = all(isinstance(e, (pd.DataFrame, modin_pd.DataFrame)) for e in data.values())
+            has_list_children = all(isinstance(e, List) for e in data.values())
+            has_np_array_children = all(isinstance(e, np.ndarray) for e in data.values())
             return has_df_children or has_list_children or has_np_array_children
         return False
 

+ 4 - 4
taipy/core/data/aws_s3.py

@@ -9,15 +9,15 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
-import boto3
 from datetime import datetime, timedelta
-from inspect import isclass
-from typing import Any, Dict, List, Optional, Set, Tuple, Union
+from typing import Any, Dict, List, Optional, Set
+
+import boto3
 
 from taipy.config.common.scope import Scope
 
 from .._version._version_manager_factory import _VersionManagerFactory
-from ..exceptions.exceptions import InvalidCustomDocument, MissingRequiredProperty
+from ..exceptions.exceptions import MissingRequiredProperty
 from .data_node import DataNode
 from .data_node_id import DataNodeId, Edit
 

+ 1 - 1
taipy/core/data/data_node.py

@@ -91,7 +91,7 @@ class DataNode(_Entity, _Labeled):
     def __init__(
         self,
         config_id,
-        scope: Scope = Scope(Scope.SCENARIO),
+        scope: Scope = Scope(Scope.SCENARIO),  # noqa: B008
         id: Optional[DataNodeId] = None,
         owner_id: Optional[str] = None,
         parent_ids: Optional[Set[str]] = None,

+ 4 - 3
taipy/core/data/excel.py

@@ -19,6 +19,7 @@ import modin.pandas as modin_pd
 import numpy as np
 import pandas as pd
 from openpyxl import load_workbook
+
 from taipy.config.common.scope import Scope
 
 from .._backup._backup import _replace_in_backup_file
@@ -342,7 +343,7 @@ class ExcelDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
 
     def _append(self, data: Any):
         if isinstance(data, Dict) and all(
-            [isinstance(x, (pd.DataFrame, modin_pd.DataFrame, np.ndarray)) for x in data.values()]
+            isinstance(x, (pd.DataFrame, modin_pd.DataFrame, np.ndarray)) for x in data.values()
         ):
             self.__append_excel_with_multiple_sheets(data)
         elif isinstance(data, (pd.DataFrame, modin_pd.DataFrame)):
@@ -378,7 +379,7 @@ class ExcelDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
 
     def _write(self, data: Any):
         if isinstance(data, Dict) and all(
-            [isinstance(x, (pd.DataFrame, modin_pd.DataFrame, np.ndarray)) for x in data.values()]
+            isinstance(x, (pd.DataFrame, modin_pd.DataFrame, np.ndarray)) for x in data.values()
         ):
             self.__write_excel_with_multiple_sheets(data)
         elif isinstance(data, (pd.DataFrame, modin_pd.DataFrame)):
@@ -395,7 +396,7 @@ class ExcelDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
             job_id (JobId^): An optional identifier of the writer.
         """
         if isinstance(data, Dict) and all(
-            [isinstance(x, (pd.DataFrame, modin_pd.DataFrame, np.ndarray)) for x in data.values()]
+            isinstance(x, (pd.DataFrame, modin_pd.DataFrame, np.ndarray)) for x in data.values()
         ):
             self.__write_excel_with_multiple_sheets(data, columns=columns)
         else:

+ 1 - 1
taipy/core/data/json.py

@@ -224,7 +224,7 @@ class _DefaultJSONEncoder(json.JSONEncoder):
 
 class _DefaultJSONDecoder(json.JSONDecoder):
     def __init__(self, *args, **kwargs):
-        json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)
+        json.JSONDecoder.__init__(self, *args, **kwargs, object_hook=self.object_hook)
 
     def object_hook(self, source):
         if _type := source.get("__type__"):

+ 2 - 2
taipy/core/data/mongo.py

@@ -178,11 +178,11 @@ class MongoCollectionDataNode(DataNode):
 
     def filter(self, operators: Optional[Union[List, Tuple]] = None, join_operator=JoinOperator.AND):
         cursor = self._read_by_query(operators, join_operator)
-        return list(map(lambda row: self._decoder(row), cursor))
+        return [self._decoder(row) for row in cursor]
 
     def _read(self):
         cursor = self._read_by_query()
-        return list(map(lambda row: self._decoder(row), cursor))
+        return [self._decoder(row) for row in cursor]
 
     def _read_by_query(self, operators: Optional[Union[List, Tuple]] = None, join_operator=JoinOperator.AND):
         """Query from a Mongo collection, exclude the _id field"""

+ 1 - 0
taipy/core/data/parquet.py

@@ -17,6 +17,7 @@ from typing import Any, Dict, List, Optional, Set
 import modin.pandas as modin_pd
 import numpy as np
 import pandas as pd
+
 from taipy.config.common.scope import Scope
 
 from .._backup._backup import _replace_in_backup_file

+ 1 - 0
taipy/core/data/pickle.py

@@ -15,6 +15,7 @@ from datetime import datetime, timedelta
 from typing import Any, List, Optional, Set
 
 import modin.pandas as pd
+
 from taipy.config.common.scope import Scope
 
 from .._backup._backup import _replace_in_backup_file

+ 1 - 0
taipy/core/data/sql.py

@@ -13,6 +13,7 @@ from datetime import datetime, timedelta
 from typing import Dict, List, Optional, Set
 
 from sqlalchemy import text
+
 from taipy.config.common.scope import Scope
 
 from .._version._version_manager_factory import _VersionManagerFactory

+ 1 - 1
taipy/core/job/_job_converter.py

@@ -57,7 +57,7 @@ class _JobConverter(_AbstractConverter):
                 fct_module, fct_name = it.get("fct_module"), it.get("fct_name")
                 job._subscribers.append(_load_fct(fct_module, fct_name))  # type: ignore
             except AttributeError:
-                raise InvalidSubscriber(f"The subscriber function {it.get('fct_name')} cannot be loaded.")
+                raise InvalidSubscriber(f"The subscriber function {it.get('fct_name')} cannot be loaded.") from None
         job._stacktrace = model.stacktrace
 
         return job

+ 6 - 6
taipy/core/scenario/scenario.py

@@ -108,10 +108,10 @@ class Scenario(_Entity, Submittable, _Labeled):
         self._properties = _Properties(self, **properties)
         self._sequences: Dict[str, Dict] = sequences or {}
 
-        _scenario_task_ids = set([task.id if isinstance(task, Task) else task for task in self._tasks])
+        _scenario_task_ids = set(task.id if isinstance(task, Task) else task for task in self._tasks)
         for sequence_name, sequence_data in self._sequences.items():
             sequence_task_ids = set(
-                [task.id if isinstance(task, Task) else task for task in sequence_data.get("tasks", [])]
+                task.id if isinstance(task, Task) else task for task in sequence_data.get("tasks", [])
             )
             self.__check_sequence_tasks_exist_in_scenario_tasks(
                 sequence_name, sequence_task_ids, self.id, _scenario_task_ids
@@ -195,8 +195,8 @@ class Scenario(_Entity, Submittable, _Labeled):
             SequenceTaskDoesNotExistInScenario^: If a task in the sequence does not exist in the scenario.
         """
         _scenario = _Reloader()._reload(self._MANAGER_NAME, self)
-        _scenario_task_ids = set([task.id if isinstance(task, Task) else task for task in _scenario._tasks])
-        _sequence_task_ids: Set[TaskId] = set([task.id if isinstance(task, Task) else task for task in tasks])
+        _scenario_task_ids = set(task.id if isinstance(task, Task) else task for task in _scenario._tasks)
+        _sequence_task_ids: Set[TaskId] = set(task.id if isinstance(task, Task) else task for task in tasks)
         self.__check_sequence_tasks_exist_in_scenario_tasks(name, _sequence_task_ids, self.id, _scenario_task_ids)
         _sequences = _Reloader()._reload(self._MANAGER_NAME, self)._sequences
         _sequences.update(
@@ -228,9 +228,9 @@ class Scenario(_Entity, Submittable, _Labeled):
             SequenceTaskDoesNotExistInScenario^: If a task in the sequence does not exist in the scenario.
         """
         _scenario = _Reloader()._reload(self._MANAGER_NAME, self)
-        _sc_task_ids = set([task.id if isinstance(task, Task) else task for task in _scenario._tasks])
+        _sc_task_ids = set(task.id if isinstance(task, Task) else task for task in _scenario._tasks)
         for name, tasks in sequences.items():
-            _seq_task_ids: Set[TaskId] = set([task.id if isinstance(task, Task) else task for task in tasks])
+            _seq_task_ids: Set[TaskId] = set(task.id if isinstance(task, Task) else task for task in tasks)
             self.__check_sequence_tasks_exist_in_scenario_tasks(name, _seq_task_ids, self.id, _sc_task_ids)
         # Need to parse twice the sequences to avoid adding some sequences and not others in case of exception
         for name, tasks in sequences.items():

+ 3 - 3
taipy/core/sequence/_sequence_manager.py

@@ -98,7 +98,7 @@ class _SequenceManager(_Manager[Sequence], _VersionMixin):
                     Notifier.publish(Event(cls._EVENT_ENTITY_TYPE, EventOperation.DELETION, entity_id=sequence_id))
         except (ModelNotFound, KeyError):
             cls.__log_error_entity_not_found(sequence_id)
-            raise ModelNotFound(cls._model_name, sequence_id)
+            raise ModelNotFound(cls._model_name, sequence_id) from None
 
     @classmethod
     def _delete_by_version(cls, version_number: str):
@@ -185,7 +185,7 @@ class _SequenceManager(_Manager[Sequence], _VersionMixin):
             return sequence_name, scenario_id
         except (ValueError, IndexError):
             cls._logger.error(f"SequenceId {sequence_id} is invalid.")
-            raise InvalidSequenceId(sequence_id)
+            raise InvalidSequenceId(sequence_id) from None
 
     @classmethod
     def _get(cls, sequence: Union[str, Sequence], default=None) -> Sequence:
@@ -227,7 +227,7 @@ class _SequenceManager(_Manager[Sequence], _VersionMixin):
         filtered_sequences = []
         for sequence in sequences:
             for filter in filters:
-                if all([getattr(sequence, key) == item for key, item in filter.items()]):
+                if all(getattr(sequence, key) == item for key, item in filter.items()):
                     filtered_sequences.append(sequence)
         return filtered_sequences
 

+ 2 - 2
taipy/gui/_renderers/_markdown/blocproc.py

@@ -36,7 +36,7 @@ class _StartBlockProcessor(BlockProcessor):
     def run(self, parent, blocks):
         original_block = blocks[0]
         original_match = re.search(_StartBlockProcessor.__RE_FENCE_START, original_block)
-        blocks[0] = re.sub(_StartBlockProcessor.__RE_FENCE_START, "", blocks[0], 1)
+        blocks[0] = re.sub(_StartBlockProcessor.__RE_FENCE_START, "", blocks[0], count=1)
         tag = original_match.group(1)
         queue = [tag]
         # Find block with ending fence
@@ -53,7 +53,7 @@ class _StartBlockProcessor(BlockProcessor):
                     _MarkdownFactory._TAIPY_START + tag + r"\.end(.*?)" + _MarkdownFactory._TAIPY_END,
                     "",
                     block,
-                    1,
+                    count=1,
                 )
                 # render fenced area inside a new div
                 e = _MarkdownFactory.create_element(self._gui, original_match.group(1), original_match.group(2))

+ 11 - 4
taipy/gui/_renderers/builder.py

@@ -77,13 +77,16 @@ class _Builder:
         control_type: str,
         element_name: str,
         attributes: t.Optional[t.Dict[str, t.Any]],
-        hash_names: t.Dict[str, str] = {},
+        hash_names: t.Optional[t.Dict[str, str]] = None,
         default_value="<Empty>",
         lib_name: str = "taipy",
     ):
         from ..gui import Gui
         from .factory import _Factory
 
+        if hash_names is None:
+            hash_names = {}
+
         self.el = etree.Element(element_name)
 
         self.__control_type = control_type
@@ -142,8 +145,10 @@ class _Builder:
 
     @staticmethod
     def _get_variable_hash_names(
-        gui: "Gui", attributes: t.Dict[str, t.Any], hash_names: t.Dict[str, str] = {}
+        gui: "Gui", attributes: t.Dict[str, t.Any], hash_names: t.Optional[t.Dict[str, str]] = None
     ) -> t.Dict[str, str]:
+        if hash_names is None:
+            hash_names = {}
         hashes = {}
         # Bind potential function and expressions in self.attributes
         for k, v in attributes.items():
@@ -294,7 +299,7 @@ class _Builder:
             try:
                 val = float(value)
             except ValueError:
-                raise ValueError(f"Property {name} expects a number for control {self.__control_type}")
+                raise ValueError(f"Property {name} expects a number for control {self.__control_type}") from None
         elif isinstance(value, numbers.Number):
             val = value  # type: ignore
         else:
@@ -799,7 +804,9 @@ class _Builder:
             self.set_attribute("updateVars", ";".join(self.__update_vars))
         return self
 
-    def _set_table_pagesize_options(self, default_size=[50, 100, 500]):
+    def _set_table_pagesize_options(self, default_size=None):
+        if default_size is None:
+            default_size = [50, 100, 500]
         page_size_options = self.__attributes.get("page_size_options", default_size)
         if isinstance(page_size_options, str):
             try:

+ 1 - 1
taipy/gui/builder/_element.py

@@ -60,7 +60,7 @@ class _Element(ABC):
         if isinstance(value, (str, dict, Iterable)):
             return value
         if hasattr(value, "__name__"):
-            return str(getattr(value, "__name__"))
+            return str(getattr(value, "__name__"))  # noqa: B009
         return str(value)
 
     @abstractmethod

+ 2 - 1
taipy/gui/config.py

@@ -17,9 +17,10 @@ from importlib.util import find_spec
 import pytz
 import tzlocal
 from dotenv import dotenv_values
-from taipy.logger._taipy_logger import _TaipyLogger
 from werkzeug.serving import is_running_from_reloader
 
+from taipy.logger._taipy_logger import _TaipyLogger
+
 from ._gui_cli import _GuiCLI
 from ._page import _Page
 from ._warnings import _warn

+ 1 - 1
taipy/gui/extension/library.py

@@ -387,7 +387,7 @@ class ElementLibrary(ABC):
         """
         return None
 
-    def on_user_init(self, state: "State"):
+    def on_user_init(self, state: "State"):  # noqa: B027
         """
         Initialize user state on first access.
 

+ 9 - 6
taipy/gui/gui.py

@@ -28,13 +28,14 @@ from importlib.util import find_spec
 from types import FrameType, SimpleNamespace
 from urllib.parse import unquote, urlencode, urlparse
 
-import __main__
 import markdown as md_lib
 import tzlocal
 from flask import Blueprint, Flask, g, jsonify, request, send_file, send_from_directory
-from taipy.logger._taipy_logger import _TaipyLogger
 from werkzeug.utils import secure_filename
 
+import __main__  # noqa: F401
+from taipy.logger._taipy_logger import _TaipyLogger
+
 if util.find_spec("pyngrok"):
     from pyngrok import ngrok
 
@@ -237,7 +238,7 @@ class Gui:
         page: t.Optional[t.Union[str, Page]] = None,
         pages: t.Optional[dict] = None,
         css_file: t.Optional[str] = None,
-        path_mapping: t.Optional[dict] = {},
+        path_mapping: t.Optional[dict] = None,
         env_filename: t.Optional[str] = None,
         libraries: t.Optional[t.List[ElementLibrary]] = None,
         flask: t.Optional[Flask] = None,
@@ -292,6 +293,8 @@ class Gui:
         self._set_css_file(css_file)
 
         # Preserve server config for server initialization
+        if path_mapping is None:
+            path_mapping = {}
         self._path_mapping = path_mapping
         self._flask = flask
 
@@ -552,7 +555,7 @@ class Gui:
     def __is_var_modified_in_context(self, var_name: str, derived_vars: t.Set[str]) -> bool:
         modified_vars: t.Optional[t.Set[str]] = getattr(g, "modified_vars", None)
         der_vars: t.Optional[t.Set[str]] = getattr(g, "derived_vars", None)
-        setattr(g, "update_count", getattr(g, "update_count", 0) + 1)
+        setattr(g, "update_count", getattr(g, "update_count", 0) + 1)  # noqa: B010
         if modified_vars is None:
             modified_vars = set()
             g.modified_vars = modified_vars
@@ -574,7 +577,7 @@ class Gui:
             delattr(g, "derived_vars")
             return derived_vars
         else:
-            setattr(g, "update_count", update_count)
+            setattr(g, "update_count", update_count)  # noqa: B010
             return None
 
     def _manage_message(self, msg_type: _WsType, message: dict) -> None:
@@ -980,7 +983,7 @@ class Gui:
                             # do not send data that is not serializable
                             continue
                 for w in debug_warnings:
-                    warnings.warn(w.message, w.category)
+                    warnings.warn(w.message, w.category)  # noqa: B028
             ws_dict[_var] = newvalue
         # TODO: What if value == newvalue?
         self.__send_ws_update_with_dict(ws_dict)

+ 7 - 2
taipy/gui/gui_actions.py

@@ -322,9 +322,9 @@ def invoke_state_callback(gui: Gui, state_id: str, callback: t.Callable, args: t
 def invoke_long_callback(
     state: State,
     user_function: t.Callable,
-    user_function_args: t.Union[t.Tuple, t.List] = [],
+    user_function_args: t.Optional[t.Union[t.Tuple, t.List]] = None,
     user_status_function: t.Optional[t.Callable] = None,
-    user_status_function_args: t.Union[t.Tuple, t.List] = [],
+    user_status_function_args: t.Optional[t.Union[t.Tuple, t.List]] = None,
     period=0,
 ):
     """Invoke a long running user callback.
@@ -369,6 +369,11 @@ def invoke_long_callback(
     """
     if not state or not isinstance(state._gui, Gui):
         _warn("'invoke_long_callback()' must be called in the context of a callback.")
         return
+
+    if user_status_function_args is None:
+        user_status_function_args = []
+    if user_function_args is None:
+        user_function_args = []
 
     state_id = get_state_id(state)

+ 1 - 1
taipy/gui/page.py

@@ -18,7 +18,7 @@ from types import FrameType
 from .utils import _filter_locals, _get_module_name_from_frame
 
 if t.TYPE_CHECKING:
-    from ._renderers import _Element
+    from ._renderers import _Element  # noqa: F401
 
 
 class Page:

+ 5 - 4
taipy/gui/server.py

@@ -23,15 +23,16 @@ import webbrowser
 from importlib import util
 from random import randint
 
-import __main__
 from flask import Blueprint, Flask, json, jsonify, render_template, send_from_directory
 from flask_cors import CORS
 from flask_socketio import SocketIO
 from gitignore_parser import parse_gitignore
 from kthread import KThread
-from taipy.logger._taipy_logger import _TaipyLogger
 from werkzeug.serving import is_running_from_reloader
 
+import __main__
+from taipy.logger._taipy_logger import _TaipyLogger
+
 from ._renderers.json import _TaipyJsonProvider
 from .config import ServerConfig
 from .utils import _is_in_notebook, _is_port_open, _RuntimeManager
@@ -159,10 +160,10 @@ class _Server:
                         css_vars=css_vars,
                         base_url=base_url,
                     )
-                except Exception:  # pragma: no cover
+                except Exception:
                     raise RuntimeError(
                         "Something is wrong with the taipy-gui front-end installation. Check that the js bundle has been properly built (is Node.js installed?)."  # noqa: E501
-                    )
+                    ) from None
 
             if path == "taipy.status.json":
                 return self._direct_render_json(self._gui._serve_status(pathlib.Path(template_folder) / path))

+ 0 - 1
taipy/gui/state.py

@@ -16,7 +16,6 @@ from operator import attrgetter
 from types import FrameType
 
 from flask import has_app_context
-from flask.ctx import AppContext
 
 from .utils import _get_module_name_from_frame, _is_in_notebook
 from .utils._attributes import _attrsetter

+ 2 - 2
taipy/gui/utils/chart_config_builder.py

@@ -208,10 +208,10 @@ def _build_chart_config(gui: "Gui", attributes: t.Dict[str, t.Any], col_types: t
             used_cols = {tr[ax.value] for ax in axis[i] if tr[ax.value]}
             unused_cols = [c for c in icols[i] if c not in used_cols]
             if unused_cols and not any(tr[ax.value] for ax in axis[i]):
-                traces[i] = list(
+                traces[i] = [
                     v or (unused_cols.pop(0) if unused_cols and _Chart_iprops(j) in axis[i] else v)
                     for j, v in enumerate(tr)
-                )
+                ]
 
     if col_dict is not None:
         reverse_cols = {str(cd.get("dfid")): c for c, cd in col_dict.items()}

+ 0 - 1
taipy/gui/utils/filename.py

@@ -9,7 +9,6 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
-import typing as t
 from pathlib import Path
 
 

+ 1 - 2
taipy/gui/utils/get_module_name.py

@@ -9,10 +9,9 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
-import inspect
 import sys
 import typing as t
-from types import FrameType, ModuleType
+from types import FrameType
 
 
 def _get_module_name_from_frame(frame: FrameType):

+ 1 - 1
taipy/gui/utils/proxy.py

@@ -46,7 +46,7 @@ def _modifiedHandleResponseEnd(self):
     self.transport.loseConnection()
 
 
-setattr(ProxyClient, "handleResponseEnd", _modifiedHandleResponseEnd)
+setattr(ProxyClient, "handleResponseEnd", _modifiedHandleResponseEnd)  # noqa: B010
 
 
 class _TaipyReverseProxyResource(Resource):

+ 2 - 2
taipy/gui/utils/types.py

@@ -81,7 +81,7 @@ class _TaipyNumber(_TaipyBase):
         try:
             return float(super().get())
         except Exception as e:
-            raise TypeError(f"Variable '{self._get_readable_name()}' should hold a number: {e}")
+            raise TypeError(f"Variable '{self._get_readable_name()}' should hold a number: {e}") from None
 
     def cast_value(self, value: t.Any):
         if isinstance(value, str):
@@ -101,7 +101,7 @@ class _TaipyLoNumbers(_TaipyBase):
     def cast_value(self, value: t.Any):
         if isinstance(value, str):
             try:
-                return list(map(lambda f: float(f), value[1:-1].split(",")))
+                return [float(f) for f in value[1:-1].split(",")]
             except Exception as e:
                 _warn(f"{self._get_readable_name()}: Parsing {value} as an array of numbers", e)
                 return []

+ 13 - 2
taipy/gui_core/_adapters.py

@@ -12,9 +12,20 @@
 import typing as t
 from enum import Enum
 
-from taipy.core import Cycle, DataNode, Job, Scenario, Sequence, Task
+from taipy.core import (
+    Cycle,
+    DataNode,
+    Job,
+    Scenario,
+    Sequence,
+    Task,
+    is_deletable,
+    is_editable,
+    is_promotable,
+    is_readable,
+    is_submittable,
+)
 from taipy.core import get as core_get
-from taipy.core import is_deletable, is_editable, is_promotable, is_readable, is_submittable
 from taipy.gui._warnings import _warn
 from taipy.gui.gui import _DoNotUpdate
 from taipy.gui.utils import _TaipyBase

+ 5 - 8
taipy/gui_core/_context.py

@@ -29,18 +29,13 @@ from taipy.core import (
     DataNode,
     DataNodeId,
     Job,
-    JobId,
     Scenario,
     ScenarioId,
     Sequence,
     SequenceId,
     cancel_job,
     create_scenario,
-)
-from taipy.core import delete as core_delete
-from taipy.core import delete_job
-from taipy.core import get as core_get
-from taipy.core import (
+    delete_job,
     get_cycles_scenarios,
     get_data_nodes,
     get_jobs,
@@ -51,13 +46,15 @@ from taipy.core import (
     is_submittable,
     set_primary,
 )
+from taipy.core import delete as core_delete
+from taipy.core import get as core_get
 from taipy.core import submit as core_submit
 from taipy.core.data._abstract_tabular import _AbstractTabularDataNode
 from taipy.core.notification import CoreEventConsumerBase, EventEntityType
 from taipy.core.notification.event import Event, EventOperation
 from taipy.core.notification.notifier import Notifier
-from taipy.core.submission.submission import Submission
 from taipy.core.submission._submission_manager_factory import _SubmissionManagerFactory
+from taipy.core.submission.submission import Submission
 from taipy.core.submission.submission_status import SubmissionStatus
 from taipy.gui import Gui, State
 from taipy.gui._warnings import _warn
@@ -674,7 +671,7 @@ class _GuiCoreContext(CoreEventConsumerBase):
                         else e.get("comment", ""),
                     )
                 )
-            return list(reversed(sorted(res, key=lambda r: r[0])))
+            return sorted(res, key=lambda r: r[0], reverse=True)
         return _DoNotUpdate()
 
     def get_data_node_data(self, datanode: DataNode, id: str):

+ 1 - 0
taipy/rest/api/error_handler.py

@@ -11,6 +11,7 @@
 
 from flask import jsonify
 from marshmallow import ValidationError
+
 from taipy.core.exceptions.exceptions import (
     NonExistingCycle,
     NonExistingDataNode,

+ 1 - 0
taipy/rest/api/resources/job.py

@@ -14,6 +14,7 @@ from typing import Optional
 
 from flask import request
 from flask_restful import Resource
+
 from taipy.config.config import Config
 from taipy.core import Job, JobId
 from taipy.core.exceptions.exceptions import NonExistingJob, NonExistingTaskConfig

+ 1 - 0
taipy/rest/api/resources/scenario.py

@@ -11,6 +11,7 @@
 
 from flask import request
 from flask_restful import Resource
+
 from taipy.config.config import Config
 from taipy.core.exceptions.exceptions import NonExistingScenario, NonExistingScenarioConfig
 from taipy.core.scenario._scenario_manager_factory import _ScenarioManagerFactory

+ 1 - 0
taipy/rest/api/resources/task.py

@@ -11,6 +11,7 @@
 
 from flask import request
 from flask_restful import Resource
+
 from taipy.config.config import Config
 from taipy.core.exceptions.exceptions import NonExistingTask, NonExistingTaskConfig
 from taipy.core.task._task_manager_factory import _TaskManagerFactory

+ 1 - 0
taipy/rest/api/views.py

@@ -11,6 +11,7 @@
 
 from flask import Blueprint, current_app
 from flask_restful import Api
+
 from taipy.core.common._utils import _load_fct
 from taipy.logger._taipy_logger import _TaipyLogger
 

+ 1 - 0
taipy/templates/default/{{cookiecutter.__root_folder_name}}/configuration/config.py

@@ -16,6 +16,7 @@ The configuration is run by the Core service.
 """
 
 from algorithms import *
+
 from taipy import Config
 
 # #############################################################################

+ 0 - 3
tests/config/checker/checkers/test_checker.py

@@ -9,13 +9,10 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
-import os
-from unittest import mock
 from unittest.mock import MagicMock
 
 from taipy.config import Config
 from taipy.config.checker._checker import _Checker
-from taipy.config.checker.issue_collector import IssueCollector
 from tests.config.utils.checker_for_tests import CheckerForTest
 
 

+ 0 - 1
tests/config/test_config_comparator.py

@@ -179,7 +179,6 @@ class TestConfigComparator:
 
     def test_comparator_with_different_order_list_attributes(self):
         _config_1 = _Config._default_config()
-        _config_1._unique_sections
         _config_1._sections[SectionForTest.name] = {"section_3": self.section_3b}
 
         # Create _config_2 with different order of list attributes

+ 2 - 2
tests/config/test_override_config.py

@@ -88,12 +88,12 @@ bar_attribute = "ENV[BAR]:bool"
     with mock.patch.dict(os.environ, {"FOO": "foo", "BAR": "true"}):
         with pytest.raises(InconsistentEnvVariableError):
             Config.load(tf.filename)
-            Config.global_config.foo_attribute
+            _ = Config.global_config.foo_attribute
 
     with mock.patch.dict(os.environ, {"FOO": "5"}):
         with pytest.raises(MissingEnvVariableError):
             Config.load(tf.filename)
-            Config.global_config.bar_attribute
+            _ = Config.global_config.bar_attribute
 
     with mock.patch.dict(os.environ, {"FOO": "6", "BAR": "TRUe"}):
         Config.load(tf.filename)

+ 1 - 1
tests/config/test_section_serialization.py

@@ -43,7 +43,7 @@ class CustomEncoder(json.JSONEncoder):
 
 class CustomDecoder(json.JSONDecoder):
     def __init__(self, *args, **kwargs):
-        json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)
+        json.JSONDecoder.__init__(self, *args, **kwargs, object_hook=self.object_hook)
 
     def object_hook(self, source):
         if source.get("__type__") == "Datetime":

+ 1 - 0
tests/core/_entity/test_labelled.py

@@ -12,6 +12,7 @@
 from unittest import mock
 
 import pytest
+
 from taipy.config import Config, Frequency, Scope
 from taipy.core import taipy
 from taipy.core._entity._labeled import _Labeled

+ 6 - 4
tests/core/_entity/test_migrate_cli.py

@@ -13,10 +13,12 @@ import filecmp
 import os
 import shutil
 import sys
+from sqlite3 import OperationalError
 from unittest.mock import patch
 
 import mongomock
 import pytest
+
 from taipy.core._entity._migrate_cli import _MigrateCLI
 
 
@@ -170,7 +172,7 @@ def test_migrate_sql_backup_and_remove(caplog, tmp_sqlite):
     assert not os.path.exists(backup_sqlite)
 
     # Run without --skip-backup to create the backup database
-    with pytest.raises(Exception):
+    with pytest.raises((SystemExit, OperationalError)):
         with patch("sys.argv", ["prog", "migrate", "--repository-type", "sql", tmp_sqlite]):
             _MigrateCLI.parse_arguments()
 
@@ -204,7 +206,7 @@ def test_migrate_sql_backup_and_restore(caplog, tmp_sqlite):
     assert not os.path.exists(backup_sqlite)
 
     # Run without --skip-backup to create the backup database
-    with pytest.raises(Exception):
+    with pytest.raises((SystemExit, OperationalError)):
         with patch("sys.argv", ["prog", "migrate", "--repository-type", "sql", tmp_sqlite]):
             _MigrateCLI.parse_arguments()
 
@@ -269,7 +271,7 @@ def test_migrate_mongo_backup_and_remove(caplog):
     assert not os.path.exists(mongo_backup_path)
 
     # Run without --skip-backup to create the backup database
-    with pytest.raises(Exception):
+    with pytest.raises(SystemExit):
         with patch("sys.argv", ["prog", "migrate", "--repository-type", "mongo"]):
             _MigrateCLI.parse_arguments()
 
@@ -298,7 +300,7 @@ def test_migrate_mongo_backup_and_restore(caplog):
     assert not os.path.exists(mongo_backup_path)
 
     # Run without --skip-backup to create the backup database
-    with pytest.raises(Exception):
+    with pytest.raises(SystemExit):
         with patch("sys.argv", ["prog", "migrate", "--repository-type", "mongo"]):
             _MigrateCLI.parse_arguments()
 

+ 9 - 10
tests/core/_orchestrator/test_orchestrator.py

@@ -18,7 +18,6 @@ from functools import partial
 from time import sleep
 
 import pytest
-from tests.core.utils import assert_true_after_time
 
 from taipy.config import Config
 from taipy.config.common.scope import Scope
@@ -28,6 +27,7 @@ from taipy.core._orchestrator._orchestrator import _Orchestrator
 from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
 from taipy.core.config.job_config import JobConfig
 from taipy.core.data._data_manager import _DataManager
+from taipy.core.data.pickle import PickleDataNode
 from taipy.core.scenario._scenario_manager import _ScenarioManager
 from taipy.core.scenario.scenario import Scenario
 from taipy.core.sequence.sequence import Sequence
@@ -35,8 +35,7 @@ from taipy.core.submission._submission_manager import _SubmissionManager
 from taipy.core.submission.submission_status import SubmissionStatus
 from taipy.core.task._task_manager import _TaskManager
 from taipy.core.task.task import Task
-from taipy.core.data.pickle import PickleDataNode
-
+from tests.core.utils import assert_true_after_time
 
 # ################################  USER FUNCTIONS  ##################################
 
@@ -261,19 +260,19 @@ def test_scenario_only_submit_same_task_once():
 
     jobs = _Orchestrator.submit(scenario_1)
     assert len(jobs) == 3
-    assert all([job.is_completed() for job in jobs])
+    assert all(job.is_completed() for job in jobs)
     assert all(not _Orchestrator._is_blocked(job) for job in jobs)
     assert _SubmissionManager._get(jobs[0].submit_id).submission_status == SubmissionStatus.COMPLETED
 
     jobs = _Orchestrator.submit(sequence_1)
     assert len(jobs) == 2
-    assert all([job.is_completed() for job in jobs])
+    assert all(job.is_completed() for job in jobs)
     assert all(not _Orchestrator._is_blocked(job) for job in jobs)
     assert _SubmissionManager._get(jobs[0].submit_id).submission_status == SubmissionStatus.COMPLETED
 
     jobs = _Orchestrator.submit(sequence_2)
     assert len(jobs) == 2
-    assert all([job.is_completed() for job in jobs])
+    assert all(job.is_completed() for job in jobs)
     assert all(not _Orchestrator._is_blocked(job) for job in jobs)
     assert _SubmissionManager._get(jobs[0].submit_id).submission_status == SubmissionStatus.COMPLETED
 
@@ -309,7 +308,7 @@ def test_update_status_fail_job():
     jobs = _Orchestrator.submit(scenario_1)
     tasks_jobs = {job._task.id: job for job in jobs}
     assert tasks_jobs["task_0"].is_failed()
-    assert all([job.is_abandoned() for job in [tasks_jobs["task_1"], tasks_jobs["task_2"]]])
+    assert all(job.is_abandoned() for job in [tasks_jobs["task_1"], tasks_jobs["task_2"]])
     assert tasks_jobs["task_3"].is_completed()
     assert all(not _Orchestrator._is_blocked(job) for job in jobs)
     assert _SubmissionManager._get(jobs[0].submit_id).submission_status == SubmissionStatus.FAILED
@@ -317,7 +316,7 @@ def test_update_status_fail_job():
     jobs = _Orchestrator.submit(scenario_2)
     tasks_jobs = {job._task.id: job for job in jobs}
     assert tasks_jobs["task_0"].is_failed()
-    assert all([job.is_abandoned() for job in [tasks_jobs["task_1"], tasks_jobs["task_2"]]])
+    assert all(job.is_abandoned() for job in [tasks_jobs["task_1"], tasks_jobs["task_2"]])
     assert tasks_jobs["task_3"].is_completed()
     assert all(not _Orchestrator._is_blocked(job) for job in jobs)
     assert _SubmissionManager._get(jobs[0].submit_id).submission_status == SubmissionStatus.FAILED
@@ -368,7 +367,7 @@ def test_update_status_fail_job_in_parallel_one_sequence():
 
     tasks_jobs = {job._task.id: job for job in jobs}
     assert_true_after_time(tasks_jobs["task_0"].is_failed)
-    assert_true_after_time(lambda: all([job.is_abandoned() for job in [tasks_jobs["task_1"], tasks_jobs["task_2"]]]))
+    assert_true_after_time(lambda: all(job.is_abandoned() for job in [tasks_jobs["task_1"], tasks_jobs["task_2"]]))
     assert_true_after_time(lambda: all(not _Orchestrator._is_blocked(job) for job in jobs))
     submit_id = jobs[0].submit_id
     submission = _SubmissionManager._get(submit_id)
@@ -402,7 +401,7 @@ def test_update_status_fail_job_in_parallel_one_scenario():
     tasks_jobs = {job._task.id: job for job in jobs}
     assert_true_after_time(tasks_jobs["task_0"].is_failed)
     assert_true_after_time(tasks_jobs["task_3"].is_completed)
-    assert_true_after_time(lambda: all([job.is_abandoned() for job in [tasks_jobs["task_1"], tasks_jobs["task_2"]]]))
+    assert_true_after_time(lambda: all(job.is_abandoned() for job in [tasks_jobs["task_1"], tasks_jobs["task_2"]]))
     assert_true_after_time(lambda: all(not _Orchestrator._is_blocked(job) for job in jobs))
     submit_id = jobs[0].submit_id
     submission = _SubmissionManager._get(submit_id)

+ 1 - 0
tests/core/_orchestrator/test_orchestrator_factory.py

@@ -12,6 +12,7 @@
 from unittest import mock
 
 import pytest
+
 from taipy.config import Config
 from taipy.core._orchestrator._dispatcher import _DevelopmentJobDispatcher, _JobDispatcher, _StandaloneJobDispatcher
 from taipy.core._orchestrator._orchestrator import _Orchestrator

+ 7 - 5
tests/core/common/test_retry.py

@@ -10,18 +10,20 @@
 # specific language governing permissions and limitations under the License.
 
 import pytest
+
 from taipy.config import Config
 from taipy.core.common._utils import _retry_read_entity
+from taipy.core.exceptions import ModelNotFound
 
 
 def test_retry_decorator(mocker):
-    func = mocker.Mock(side_effect=Exception())
+    func = mocker.Mock(side_effect=ModelNotFound())
 
-    @_retry_read_entity((Exception,))
+    @_retry_read_entity((ModelNotFound,))
     def decorated_func():
         func()
 
-    with pytest.raises(Exception):
+    with pytest.raises(ModelNotFound):
         decorated_func()
     # Called once in the normal flow and no retry
     # The Config.core.read_entity_retry is set to 0 at conftest.py
@@ -31,7 +33,7 @@ def test_retry_decorator(mocker):
     func.reset_mock()
 
     Config.core.read_entity_retry = 3
-    with pytest.raises(Exception):
+    with pytest.raises(ModelNotFound):
         decorated_func()
     # Called once in the normal flow and 3 more times on the retry flow
     assert func.call_count == 4
@@ -41,7 +43,7 @@ def test_retry_decorator_exception_not_in_list(mocker):
     func = mocker.Mock(side_effect=KeyError())
     Config.core.read_entity_retry = 3
 
-    @_retry_read_entity((Exception,))
+    @_retry_read_entity((ModelNotFound,))
     def decorated_func():
         func()
 

+ 5 - 5
tests/core/common/test_warn_if_inputs_not_ready.py

@@ -28,7 +28,7 @@ def test_warn_inputs_all_not_ready(caplog):
         f"path : {input_dn.path} "
         for input_dn in data_nodes
     ]
-    assert all([expected_output in stdout for expected_output in expected_outputs])
+    assert all(expected_output in stdout for expected_output in expected_outputs)
 
 
 def test_warn_inputs_all_ready(caplog):
@@ -45,7 +45,7 @@ def test_warn_inputs_all_ready(caplog):
         f"path : {input_dn.path} "
         for input_dn in data_nodes
     ]
-    assert all([expected_output not in stdout for expected_output in not_expected_outputs])
+    assert all(expected_output not in stdout for expected_output in not_expected_outputs)
 
 
 def test_warn_inputs_one_ready(caplog):
@@ -69,8 +69,8 @@ def test_warn_inputs_one_ready(caplog):
         for input_dn in [data_nodes[one]]
     ]
 
-    assert all([expected_output in stdout for expected_output in expected_outputs])
-    assert all([expected_output not in stdout for expected_output in not_expected_outputs])
+    assert all(expected_output in stdout for expected_output in expected_outputs)
+    assert all(expected_output not in stdout for expected_output in not_expected_outputs)
 
 
 def test_submit_task_with_input_dn_wrong_file_path(caplog):
@@ -92,4 +92,4 @@ def test_submit_task_with_input_dn_wrong_file_path(caplog):
         f"path : {input_dn.path} "
         for input_dn in dns
     ]
-    assert all([expected_output in stdout for expected_output in expected_outputs])
+    assert all(expected_output in stdout for expected_output in expected_outputs)

+ 1 - 0
tests/core/config/checkers/test_job_config_checker.py

@@ -10,6 +10,7 @@
 # specific language governing permissions and limitations under the License.
 
 import pytest
+
 from taipy.config.checker.issue_collector import IssueCollector
 from taipy.config.config import Config
 from taipy.core.config.job_config import JobConfig

+ 1 - 0
tests/core/config/checkers/test_migration_config_checker.py

@@ -12,6 +12,7 @@
 from unittest.mock import patch
 
 import pytest
+
 from taipy.config.config import Config
 from taipy.core import Core
 from taipy.core._version._version_manager import _VersionManager

+ 1 - 1
tests/core/config/test_config_serialization.py

@@ -58,7 +58,7 @@ class CustomEncoder(json.JSONEncoder):
 
 class CustomDecoder(json.JSONDecoder):
     def __init__(self, *args, **kwargs):
-        json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)
+        json.JSONDecoder.__init__(self, *args, **kwargs, object_hook=self.object_hook)
 
     def object_hook(self, source):
         if source.get("__type__") == "Datetime":

+ 3 - 3
tests/core/config/test_core_section.py

@@ -13,12 +13,12 @@ import os
 from unittest.mock import patch
 
 import pytest
+
 from taipy.config import Config
 from taipy.config.exceptions.exceptions import MissingEnvVariableError
 from taipy.core import Core
 from taipy.core._version._version_manager_factory import _VersionManagerFactory
 from taipy.core.config import CoreSection
-
 from tests.core.utils.named_temporary_file import NamedTemporaryFile
 
 
@@ -74,9 +74,9 @@ def test_config_attribute_overiden_by_code_config_including_env_variable_values(
     Config.configure_core(root_folder="ENV[ROOT_FOLDER]", storage_folder="ENV[STORAGE_FOLDER]")
 
     with pytest.raises(MissingEnvVariableError):
-        Config.core.root_folder
+        _ = Config.core.root_folder
     with pytest.raises(MissingEnvVariableError):
-        Config.core.storage_folder
+        _ = Config.core.storage_folder
 
     with patch.dict(os.environ, {"ROOT_FOLDER": "foo", "STORAGE_FOLDER": "bar"}):
         assert Config.core.root_folder == "foo"

+ 1 - 0
tests/core/config/test_core_version.py

@@ -12,6 +12,7 @@
 from unittest.mock import patch
 
 import pytest
+
 from taipy.config.config import Config
 from taipy.core._init_version import _read_version
 from taipy.core.config.core_section import CoreSection

+ 1 - 0
tests/core/config/test_data_node_config.py

@@ -14,6 +14,7 @@ import os
 from unittest import mock
 
 import pytest
+
 from taipy.config.common.scope import Scope
 from taipy.config.config import Config
 from taipy.config.exceptions.exceptions import ConfigurationUpdateBlocked

+ 0 - 2
tests/core/config/test_job_config.py

@@ -9,8 +9,6 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
-import pytest
-
 from taipy.config.config import Config
 
 

+ 2 - 8
tests/core/data/test_aws_s3_data_node.py

@@ -9,18 +9,12 @@
 # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations under the License.
 
-import os
-from dataclasses import dataclass
-from unittest.mock import patch
-
 import boto3
-from moto import mock_s3
 import pytest
+from moto import mock_s3
 
-from taipy.core.data.data_node_id import DataNodeId
-from taipy.core.data.aws_s3 import S3ObjectDataNode
-from taipy.core.exceptions.exceptions import InvalidCustomDocument, MissingRequiredProperty
 from taipy.config.common.scope import Scope
+from taipy.core.data.aws_s3 import S3ObjectDataNode
 
 
 class TestS3ObjectDataNode:

Некоторые файлы не были показаны из-за большого количества измененных файлов