Переглянути джерело

Merge pull request #1007 from Avaiga/refactor/continue#609-cleaner-code-and-stability

Refactor - Cleaner Code & Stability
Đỗ Trường Giang 1 рік тому
батько
коміт
5dc674c146
46 змінених файлів з 302 додано та 374 видалено
  1. 1 1
      taipy/core/_entity/submittable.py
  2. 1 1
      taipy/core/_orchestrator/_orchestrator.py
  3. 2 3
      taipy/core/_repository/_sql_repository.py
  4. 1 4
      taipy/core/_repository/db/_sql_connection.py
  5. 1 2
      taipy/core/_version/_utils.py
  6. 8 10
      taipy/core/_version/_version_manager.py
  7. 5 5
      taipy/core/_version/_version_mixin.py
  8. 9 10
      taipy/core/common/_utils.py
  9. 1 1
      taipy/core/config/checkers/_config_id_checker.py
  10. 89 85
      taipy/core/config/checkers/_data_node_config_checker.py
  11. 3 4
      taipy/core/config/checkers/_scenario_config_checker.py
  12. 7 8
      taipy/core/config/checkers/_task_config_checker.py
  13. 1 4
      taipy/core/config/data_node_config.py
  14. 1 2
      taipy/core/config/job_config.py
  15. 7 10
      taipy/core/config/scenario_config.py
  16. 3 3
      taipy/core/cycle/cycle.py
  17. 1 1
      taipy/core/data/_abstract_sql.py
  18. 1 2
      taipy/core/data/_data_manager.py
  19. 22 23
      taipy/core/data/_filter.py
  20. 1 1
      taipy/core/data/aws_s3.py
  21. 2 6
      taipy/core/data/csv.py
  22. 7 12
      taipy/core/data/data_node.py
  23. 2 5
      taipy/core/data/excel.py
  24. 3 3
      taipy/core/data/generic.py
  25. 1 1
      taipy/core/data/mongo.py
  26. 1 3
      taipy/core/job/_job_manager.py
  27. 1 11
      taipy/core/notification/_topic.py
  28. 11 20
      taipy/core/scenario/_scenario_manager.py
  29. 5 6
      taipy/core/sequence/_sequence_manager.py
  30. 2 6
      taipy/core/submission/submission.py
  31. 1 2
      taipy/core/task/task.py
  32. 2 4
      taipy/gui/_renderers/factory.py
  33. 1 4
      taipy/gui/extension/library.py
  34. 2 2
      taipy/gui/gui.py
  35. 20 18
      taipy/gui/utils/_evaluator.py
  36. 3 6
      taipy/gui/utils/html.py
  37. 4 7
      taipy/gui_core/_adapters.py
  38. 24 25
      taipy/gui_core/_context.py
  39. 4 5
      taipy/rest/api/resources/cycle.py
  40. 8 9
      taipy/rest/api/resources/datanode.py
  41. 8 9
      taipy/rest/api/resources/job.py
  42. 8 9
      taipy/rest/api/resources/scenario.py
  43. 4 5
      taipy/rest/api/resources/sequence.py
  44. 8 9
      taipy/rest/api/resources/task.py
  45. 2 3
      taipy/rest/commons/apispec.py
  46. 3 4
      taipy/rest/commons/encoder.py

+ 1 - 1
taipy/core/_entity/submittable.py

@@ -34,7 +34,7 @@ class Submittable:
     """
 
     def __init__(self, subscribers: Optional[List[_Subscriber]] = None):
-        self._subscribers = _ListAttributes(self, subscribers or list())
+        self._subscribers = _ListAttributes(self, subscribers or [])
 
     @abc.abstractmethod
     def submit(

+ 1 - 1
taipy/core/_orchestrator/_orchestrator.py

@@ -269,7 +269,7 @@ class _Orchestrator(_AbstractOrchestrator):
             if job.submit_id == submit_id and len(output_dn_config_ids.intersection(job_input_dn_config_ids)) > 0:
                 next_output_dn_config_ids.update(job.task.output.keys())
                 subsequent_jobs.update([job])
-        if len(next_output_dn_config_ids) > 0:
+        if next_output_dn_config_ids:
             subsequent_jobs.update(
                 cls.__find_subsequent_jobs(submit_id, output_dn_config_ids=next_output_dn_config_ids)
             )

+ 2 - 3
taipy/core/_repository/_sql_repository.py

@@ -165,8 +165,7 @@ class _SQLRepository(_AbstractRepository[ModelType, Entity]):
         configs_and_owner_ids = set(configs_and_owner_ids)
 
         for config, owner in configs_and_owner_ids:
-            entry = self.__get_entities_by_config_and_owner(config.id, owner, filters)
-            if entry:
+            if entry := self.__get_entities_by_config_and_owner(config.id, owner, filters):
                 entity = self.converter._model_to_entity(entry)
                 key = config, owner
                 res[key] = entity
@@ -190,7 +189,7 @@ class _SQLRepository(_AbstractRepository[ModelType, Entity]):
 
         if versions:
             table_name = self.table.name
-            query = query + f" AND {table_name}.version IN ({','.join(['?']*len(versions))})"
+            query += f" AND {table_name}.version IN ({','.join(['?'] * len(versions))})"
             parameters.extend(versions)
 
         if entry := self.db.execute(query, parameters).fetchone():

+ 1 - 4
taipy/core/_repository/db/_sql_connection.py

@@ -22,10 +22,7 @@ from ...exceptions import MissingRequiredProperty
 
 
 def dict_factory(cursor, row):
-    d = {}
-    for idx, col in enumerate(cursor.description):
-        d[col[0]] = row[idx]
-    return d
+    return {col[0]: row[idx] for idx, col in enumerate(cursor.description)}
 
 
 class _SQLConnection:

+ 1 - 2
taipy/core/_version/_utils.py

@@ -43,8 +43,7 @@ def __get_migration_fcts_to_latest(source_version: str, config_id: str) -> List[
     versions_to_migrate = production_versions[start_index:]
 
     for version in versions_to_migrate:
-        migration_fct = Config.unique_sections[MigrationConfig.name].migration_fcts.get(version, {}).get(config_id)
-        if migration_fct:
+        if migration_fct := Config.unique_sections[MigrationConfig.name].migration_fcts.get(version, {}).get(config_id):
             migration_fcts_to_latest.append(migration_fct)
 
     return migration_fcts_to_latest

+ 8 - 10
taipy/core/_version/_version_manager.py

@@ -54,14 +54,12 @@ class _VersionManager(_Manager[_Version]):
         if version := cls._get(id):
             comparator_result = Config._comparator._find_conflict_config(version.config, Config._applied_config, id)  # type: ignore[attr-defined]
             if comparator_result.get(_ComparatorResult.CONFLICTED_SECTION_KEY):
-                if force:
-                    cls.__logger.warning(
-                        f"Option --force is detected, overriding the configuration of version {id} ..."
-                    )
-                    version.config = Config._applied_config  # type: ignore[attr-defined]
-                else:
+                if not force:
                     raise ConflictedConfigurationError()
 
+                cls.__logger.warning(f"Option --force is detected, overriding the configuration of version {id} ...")
+                version.config = Config._applied_config  # type: ignore[attr-defined]
+
         else:
             version = _Version(id=id, config=Config._applied_config)  # type: ignore[attr-defined]
 
@@ -212,16 +210,16 @@ class _VersionManager(_Manager[_Version]):
             raise SystemExit(f"Undefined execution mode: {Config.core.mode}.")
 
     @classmethod
-    def __check_production_migration_config(self):
+    def __check_production_migration_config(cls):
         from ..config.checkers._migration_config_checker import _MigrationConfigChecker
 
         collector = _MigrationConfigChecker(Config._applied_config, IssueCollector())._check()
         for issue in collector._warnings:
-            self.__logger.warning(str(issue))
+            cls.__logger.warning(str(issue))
         for issue in collector._infos:
-            self.__logger.info(str(issue))
+            cls.__logger.info(str(issue))
         for issue in collector._errors:
-            self.__logger.error(str(issue))
+            cls.__logger.error(str(issue))
         if len(collector._errors) != 0:
             raise SystemExit("Configuration errors found. Please check the error log for more information.")
 

+ 5 - 5
taipy/core/_version/_version_mixin.py

@@ -15,7 +15,6 @@ from .._version._version_manager_factory import _VersionManagerFactory
 
 
 class _VersionMixin:
-
     _version_manager = _VersionManagerFactory._build_manager()
 
     @classmethod
@@ -28,10 +27,11 @@ class _VersionMixin:
 
     @classmethod
     def _build_filters_with_version(cls, version_number) -> List[Dict]:
-        filters = []
-        if versions := cls.__fetch_version_number(version_number):
-            filters = [{"version": version} for version in versions]
-        return filters
+        return (
+            [{"version": version} for version in versions]
+            if (versions := cls.__fetch_version_number(version_number))
+            else []
+        )
 
     @classmethod
     def _get_latest_version(cls):

+ 9 - 10
taipy/core/common/_utils.py

@@ -54,8 +54,7 @@ def _retry_read_entity(exceptions: Tuple, sleep_time: float = 0.2):
 def _get_fct_name(f) -> Optional[str]:
     # Mock function does not have __qualname__ attribute -> return __name__
     # Partial or anonymous function does not have __name__ or __qualname__ attribute -> return None
-    name = getattr(f, "__qualname__", getattr(f, "__name__", None))
-    return name
+    return getattr(f, "__qualname__", getattr(f, "__name__", None))
 
 
 def _fct_to_dict(obj):
@@ -66,14 +65,14 @@ def _fct_to_dict(obj):
         callback = obj.callback
         params = obj.params
 
-    fct_name = _get_fct_name(callback)
-    if not fct_name:
-        return None
-    return {
-        "fct_name": fct_name,
-        "fct_params": params,
-        "fct_module": callback.__module__,
-    }
+    if fct_name := _get_fct_name(callback):
+        return {
+            "fct_name": fct_name,
+            "fct_params": params,
+            "fct_module": callback.__module__,
+        }
+
+    return None
 
 
 def _fcts_to_dict(objs):

+ 1 - 1
taipy/core/config/checkers/_config_id_checker.py

@@ -24,7 +24,7 @@ class _ConfigIdChecker(_ConfigChecker):
         existing_config_ids: Dict[str, List[str]] = dict()
         for entity_type, section_dictionary in self._config._sections.items():
             for config_id in section_dictionary.keys():
-                if config_id in existing_config_ids.keys():
+                if config_id in existing_config_ids:
                     existing_config_ids[config_id].append(entity_type)
                 else:
                     existing_config_ids[config_id] = [entity_type]

+ 89 - 85
taipy/core/config/checkers/_data_node_config_checker.py

@@ -102,96 +102,100 @@ class _DataNodeConfigChecker(_ConfigChecker):
             )
 
     def _check_required_properties(self, data_node_config_id: str, data_node_config: DataNodeConfig):
-        if storage_type := data_node_config.storage_type:
-            if storage_type in DataNodeConfig._REQUIRED_PROPERTIES:
-                required_properties = DataNodeConfig._REQUIRED_PROPERTIES[storage_type]
-                if storage_type == DataNodeConfig._STORAGE_TYPE_VALUE_SQL:
-                    if data_node_config.properties:
-                        if engine := data_node_config.properties.get(DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY):
-                            if engine == DataNodeConfig._DB_ENGINE_SQLITE:
-                                required_properties = [
-                                    DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_READ_QUERY_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
-                                ]
-                            else:
-                                required_properties = [
-                                    DataNodeConfig._OPTIONAL_DB_USERNAME_SQL_PROPERTY,
-                                    DataNodeConfig._OPTIONAL_DB_PASSWORD_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_READ_QUERY_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
-                                ]
-                if storage_type == DataNodeConfig._STORAGE_TYPE_VALUE_SQL_TABLE:
-                    if data_node_config.properties:
-                        if engine := data_node_config.properties.get(DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY):
-                            if engine == DataNodeConfig._DB_ENGINE_SQLITE:
-                                required_properties = [
-                                    DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY,
-                                ]
-                            else:
-                                required_properties = [
-                                    DataNodeConfig._OPTIONAL_DB_USERNAME_SQL_PROPERTY,
-                                    DataNodeConfig._OPTIONAL_DB_PASSWORD_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
-                                    DataNodeConfig._REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY,
-                                ]
-                for required_property in required_properties:
-                    if not data_node_config.properties or required_property not in data_node_config.properties:
-                        if data_node_config_id == DataNodeConfig._DEFAULT_KEY:
-                            self._warning(
-                                required_property,
-                                None,
-                                f"DataNodeConfig `{data_node_config_id}` is missing the required "
-                                f"property `{required_property}` for type `{storage_type}`.",
-                            )
-                        else:
-                            self._error(
-                                required_property,
-                                None,
-                                f"DataNodeConfig `{data_node_config_id}` is missing the required "
-                                f"property `{required_property}` for type `{storage_type}`.",
-                            )
+        storage_type = data_node_config.storage_type
+        if not storage_type or storage_type not in DataNodeConfig._REQUIRED_PROPERTIES:
+            return
 
-    def _check_generic_read_write_fct_and_args(self, data_node_config_id: str, data_node_config: DataNodeConfig):
-        if data_node_config.storage_type == DataNodeConfig._STORAGE_TYPE_VALUE_GENERIC:
-            properties_to_check = [
-                DataNodeConfig._OPTIONAL_READ_FUNCTION_ARGS_GENERIC_PROPERTY,
-                DataNodeConfig._OPTIONAL_WRITE_FUNCTION_ARGS_GENERIC_PROPERTY,
-            ]
-            for prop_key in properties_to_check:
-                if data_node_config.properties and prop_key in data_node_config.properties:
-                    prop_value = data_node_config.properties[prop_key]
-                    if not isinstance(prop_value, list):
-                        self._error(
-                            prop_key,
-                            prop_value,
-                            f"`{prop_key}` field of DataNodeConfig"
-                            f" `{data_node_config_id}` must be populated with a List value.",
-                        )
-            if data_node_config_id != DataNodeConfig._DEFAULT_KEY:
-                properties_to_check_at_least_one = [
-                    DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY,
-                    DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY,
-                ]
-                has_at_least_one = False
-                for prop_key in properties_to_check_at_least_one:
-                    if data_node_config.properties and prop_key in data_node_config.properties:
-                        has_at_least_one = True
-                if not has_at_least_one:
+        required_properties = DataNodeConfig._REQUIRED_PROPERTIES[storage_type]
+        if storage_type == DataNodeConfig._STORAGE_TYPE_VALUE_SQL:
+            if data_node_config.properties:
+                if engine := data_node_config.properties.get(DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY):
+                    if engine == DataNodeConfig._DB_ENGINE_SQLITE:
+                        required_properties = [
+                            DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
+                            DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
+                            DataNodeConfig._REQUIRED_READ_QUERY_SQL_PROPERTY,
+                            DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
+                        ]
+                    else:
+                        required_properties = [
+                            DataNodeConfig._OPTIONAL_DB_USERNAME_SQL_PROPERTY,
+                            DataNodeConfig._OPTIONAL_DB_PASSWORD_SQL_PROPERTY,
+                            DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
+                            DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
+                            DataNodeConfig._REQUIRED_READ_QUERY_SQL_PROPERTY,
+                            DataNodeConfig._REQUIRED_WRITE_QUERY_BUILDER_SQL_PROPERTY,
+                        ]
+        if storage_type == DataNodeConfig._STORAGE_TYPE_VALUE_SQL_TABLE:
+            if data_node_config.properties:
+                if engine := data_node_config.properties.get(DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY):
+                    if engine == DataNodeConfig._DB_ENGINE_SQLITE:
+                        required_properties = [
+                            DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
+                            DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
+                            DataNodeConfig._REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY,
+                        ]
+                    else:
+                        required_properties = [
+                            DataNodeConfig._OPTIONAL_DB_USERNAME_SQL_PROPERTY,
+                            DataNodeConfig._OPTIONAL_DB_PASSWORD_SQL_PROPERTY,
+                            DataNodeConfig._REQUIRED_DB_NAME_SQL_PROPERTY,
+                            DataNodeConfig._REQUIRED_DB_ENGINE_SQL_PROPERTY,
+                            DataNodeConfig._REQUIRED_TABLE_NAME_SQL_TABLE_PROPERTY,
+                        ]
+        for required_property in required_properties:
+            if not data_node_config.properties or required_property not in data_node_config.properties:
+                if data_node_config_id == DataNodeConfig._DEFAULT_KEY:
+                    self._warning(
+                        required_property,
+                        None,
+                        f"DataNodeConfig `{data_node_config_id}` is missing the required "
+                        f"property `{required_property}` for type `{storage_type}`.",
+                    )
+                else:
                     self._error(
-                        ", ".join(properties_to_check_at_least_one),
+                        required_property,
                         None,
-                        f"Either `{DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY}` field or "
-                        f"`{DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY}` field of "
-                        f"DataNodeConfig `{data_node_config_id}` must be populated with a Callable function.",
+                        f"DataNodeConfig `{data_node_config_id}` is missing the required "
+                        f"property `{required_property}` for type `{storage_type}`.",
                     )
 
+    def _check_generic_read_write_fct_and_args(self, data_node_config_id: str, data_node_config: DataNodeConfig):
+        if data_node_config.storage_type != DataNodeConfig._STORAGE_TYPE_VALUE_GENERIC:
+            return
+
+        properties_to_check = [
+            DataNodeConfig._OPTIONAL_READ_FUNCTION_ARGS_GENERIC_PROPERTY,
+            DataNodeConfig._OPTIONAL_WRITE_FUNCTION_ARGS_GENERIC_PROPERTY,
+        ]
+        for prop_key in properties_to_check:
+            if data_node_config.properties and prop_key in data_node_config.properties:
+                prop_value = data_node_config.properties[prop_key]
+                if not isinstance(prop_value, list):
+                    self._error(
+                        prop_key,
+                        prop_value,
+                        f"`{prop_key}` field of DataNodeConfig"
+                        f" `{data_node_config_id}` must be populated with a List value.",
+                    )
+        if data_node_config_id != DataNodeConfig._DEFAULT_KEY:
+            properties_to_check_at_least_one = [
+                DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY,
+                DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY,
+            ]
+            has_at_least_one = False
+            for prop_key in properties_to_check_at_least_one:
+                if data_node_config.properties and prop_key in data_node_config.properties:
+                    has_at_least_one = True
+            if not has_at_least_one:
+                self._error(
+                    ", ".join(properties_to_check_at_least_one),
+                    None,
+                    f"Either `{DataNodeConfig._OPTIONAL_READ_FUNCTION_GENERIC_PROPERTY}` field or "
+                    f"`{DataNodeConfig._OPTIONAL_WRITE_FUNCTION_GENERIC_PROPERTY}` field of "
+                    f"DataNodeConfig `{data_node_config_id}` must be populated with a Callable function.",
+                )
+
     def _check_callable(self, data_node_config_id: str, data_node_config: DataNodeConfig):
         properties_to_check = {
             DataNodeConfig._STORAGE_TYPE_VALUE_GENERIC: [

+ 3 - 4
taipy/core/config/checkers/_scenario_config_checker.py

@@ -118,10 +118,9 @@ class _ScenarioConfigChecker(_ConfigChecker):
     def _check_tasks_in_sequences_exist_in_scenario_tasks(
         self, scenario_config_id: str, scenario_config: ScenarioConfig
     ):
-        scenario_task_ids = set()
-        for task_config in scenario_config.tasks:
-            if isinstance(task_config, TaskConfig):
-                scenario_task_ids.add(task_config.id)
+        scenario_task_ids = {
+            task_config.id for task_config in scenario_config.tasks if isinstance(task_config, TaskConfig)
+        }
         for sequence_tasks in scenario_config.sequences.values():
             self._check_children(
                 ScenarioConfig,

+ 7 - 8
taipy/core/config/checkers/_task_config_checker.py

@@ -70,11 +70,10 @@ class _TaskConfigChecker(_ConfigChecker):
                 task_config.function,
                 f"{task_config._FUNCTION} field of TaskConfig `{task_config_id}` is empty.",
             )
-        else:
-            if not callable(task_config.function):
-                self._error(
-                    task_config._FUNCTION,
-                    task_config.function,
-                    f"{task_config._FUNCTION} field of TaskConfig `{task_config_id}` must be"
-                    f" populated with Callable value.",
-                )
+        elif not callable(task_config.function):
+            self._error(
+                task_config._FUNCTION,
+                task_config.function,
+                f"{task_config._FUNCTION} field of TaskConfig `{task_config_id}` must be"
+                f" populated with Callable value.",
+            )

+ 1 - 4
taipy/core/config/data_node_config.py

@@ -327,10 +327,7 @@ class DataNodeConfig(Section):
     def cacheable(self):
         _warn_deprecated("cacheable", suggest="the skippable feature")
         cacheable = self._properties.get("cacheable")
-        if cacheable is not None:
-            return _tpl._replace_templates(cacheable)
-        else:
-            return False
+        return _tpl._replace_templates(cacheable) if cacheable is not None else False
 
     @cacheable.setter  # type: ignore
     @_ConfigBlocker._check()

+ 1 - 2
taipy/core/config/job_config.py

@@ -66,8 +66,7 @@ class JobConfig(UniqueSection):
     @classmethod
     def _from_dict(cls, config_as_dict: Dict[str, Any], id=None, config: Optional[_Config] = None):
         mode = config_as_dict.pop(cls._MODE_KEY, None)
-        job_config = JobConfig(mode, **config_as_dict)
-        return job_config
+        return JobConfig(mode, **config_as_dict)
 
     def _update(self, as_dict: Dict[str, Any], default_section=None):
         mode = _tpl._replace_templates(as_dict.pop(self._MODE_KEY, self.mode))

+ 7 - 10
taipy/core/config/scenario_config.py

@@ -89,7 +89,7 @@ class ScenarioConfig(Section):
 
     def __copy__(self):
         comp = None if self.comparators is None else self.comparators
-        scenario_config = ScenarioConfig(
+        return ScenarioConfig(
             self.id,
             copy(self._tasks),
             copy(self._additional_data_nodes),
@@ -98,7 +98,6 @@ class ScenarioConfig(Section):
             copy(self.sequences),
             **copy(self._properties),
         )
-        return scenario_config
 
     def __getattr__(self, item: str) -> Optional[Any]:
         return _tpl._replace_templates(self._properties.get(item))
@@ -137,11 +136,11 @@ class ScenarioConfig(Section):
 
     @classmethod
     def default_config(cls):
-        return ScenarioConfig(cls._DEFAULT_KEY, list(), list(), None, dict())
+        return ScenarioConfig(cls._DEFAULT_KEY, [], [], None, dict())
 
     def _clean(self):
-        self._tasks = list()
-        self._additional_data_nodes = list()
+        self._tasks = []
+        self._additional_data_nodes = []
         self.frequency = None
         self.comparators = dict()
         self.sequences = dict()
@@ -161,9 +160,9 @@ class ScenarioConfig(Section):
     def _from_dict(cls, as_dict: Dict[str, Any], id: str, config: Optional[_Config] = None) -> "ScenarioConfig":  # type: ignore
         as_dict.pop(cls._ID_KEY, id)
 
-        tasks = cls.__get_task_configs(as_dict.pop(cls._TASKS_KEY, list()), config)
+        tasks = cls.__get_task_configs(as_dict.pop(cls._TASKS_KEY, []), config)
 
-        additional_data_node_ids = as_dict.pop(cls._ADDITIONAL_DATA_NODES_KEY, list())
+        additional_data_node_ids = as_dict.pop(cls._ADDITIONAL_DATA_NODES_KEY, [])
         additional_data_nodes = cls.__get_additional_data_node_configs(additional_data_node_ids, config)
 
         frequency = as_dict.pop(cls._FREQUENCY_KEY, None)
@@ -173,7 +172,7 @@ class ScenarioConfig(Section):
         for sequence_name, sequence_tasks in sequences.items():
             sequences[sequence_name] = cls.__get_task_configs(sequence_tasks, config)
 
-        scenario_config = ScenarioConfig(
+        return ScenarioConfig(
             id=id,
             tasks=tasks,
             additional_data_nodes=additional_data_nodes,
@@ -183,8 +182,6 @@ class ScenarioConfig(Section):
             **as_dict,
         )
 
-        return scenario_config
-
     @staticmethod
     def __get_task_configs(task_config_ids: List[str], config: Optional[_Config]):
         task_configs = set()

+ 3 - 3
taipy/core/cycle/cycle.py

@@ -141,11 +141,11 @@ class Cycle(_Entity, _Labeled):
             """
             Source: https://github.com/django/django/blob/main/django/utils/text.py
             """
-            s = str(name).strip().replace(" ", "_")
+            s = name.strip().replace(" ", "_")
             s = re.sub(r"(?u)[^-\w.]", "", s)
             if s in {"", ".", ".."}:
-                raise _SuspiciousFileOperation("Could not derive file name from '%s'" % name)
-            s = str(s).strip().replace(" ", "_")
+                raise _SuspiciousFileOperation(f"Could not derive file name from '{name}'")
+            s = s.strip().replace(" ", "_")
             return re.sub(r"(?u)[^-\w.]", "", s)
 
         return CycleId(_get_valid_filename(Cycle.__SEPARATOR.join([Cycle._ID_PREFIX, name, str(uuid.uuid4())])))

+ 1 - 1
taipy/core/data/_abstract_sql.py

@@ -148,7 +148,7 @@ class _AbstractSQLDataNode(DataNode, _AbstractTabularDataNode):
 
         if missing := set(required) - set(properties.keys()):
             raise MissingRequiredProperty(
-                f"The following properties " f"{', '.join(x for x in missing)} were not informed and are required."
+                f"The following properties {', '.join(missing)} were not informed and are required."
             )
 
     def _get_engine(self):

+ 1 - 2
taipy/core/data/_data_manager.py

@@ -136,8 +136,7 @@ class _DataManager(_Manager[DataNode], _VersionMixin):
 
     @classmethod
     def _delete(cls, data_node_id: DataNodeId):
-        data_node = cls._get(data_node_id, None)
-        if data_node:
+        if data_node := cls._get(data_node_id, None):
             cls._clean_pickle_file(data_node)
             cls._remove_dn_file_path_in_backup_file(data_node)
         super()._delete(data_node_id)

+ 22 - 23
taipy/core/data/_filter.py

@@ -25,7 +25,7 @@ from .operator import JoinOperator, Operator
 class _FilterDataNode:
     @staticmethod
     def __is_pandas_object(data) -> bool:
-        return isinstance(data, pd.DataFrame) or isinstance(data, pd.Series)
+        return isinstance(data, (pd.DataFrame, pd.Series))
 
     @staticmethod
     def __is_multi_sheet_excel(data) -> bool:
@@ -81,7 +81,7 @@ class _FilterDataNode:
         if _FilterDataNode.__is_pandas_object(data):
             return data[key]
         if _FilterDataNode.__is_list_of_dict(data):
-            filtered_data = list()
+            filtered_data = []
             for i, row in key.iterrows():
                 filtered_row = dict()
                 for col in row.index:
@@ -100,10 +100,8 @@ class _FilterDataNode:
     def __getitem_iterable(data, keys):
         if _FilterDataNode.__is_pandas_object(data):
             return data[keys]
-        filtered_data = []
-        for entry in data:
-            filtered_data.append({k: getattr(entry, k) for k in keys if hasattr(entry, k)})
-        return filtered_data
+
+        return [{k: getattr(entry, k) for k in keys if hasattr(entry, k)} for entry in data]
 
     @staticmethod
     def _filter(data, operators: Union[List, Tuple], join_operator=JoinOperator.AND):
@@ -113,7 +111,7 @@ class _FilterDataNode:
         if isinstance(data, Dict):
             return {k: _FilterDataNode._filter(v, operators, join_operator) for k, v in data.items()}
 
-        if not ((isinstance(operators[0], list)) or (isinstance(operators[0], tuple))):
+        if not isinstance(operators[0], (list, tuple)):
             if isinstance(data, pd.DataFrame):
                 return _FilterDataNode.__filter_dataframe_per_key_value(data, operators[0], operators[1], operators[2])
             if isinstance(data, np.ndarray):
@@ -131,25 +129,23 @@ class _FilterDataNode:
         raise NotImplementedError
 
     @staticmethod
-    def __filter_dataframe(
-        df_data: pd.DataFrame, operators: Union[List, Tuple], join_operator=JoinOperator.AND
-    ):
-        filtered_df_data = []
+    def __filter_dataframe(df_data: pd.DataFrame, operators: Union[List, Tuple], join_operator=JoinOperator.AND):
         if join_operator == JoinOperator.AND:
             how = "inner"
         elif join_operator == JoinOperator.OR:
             how = "outer"
         else:
             return NotImplementedError
-        for key, value, operator in operators:
-            filtered_df_data.append(_FilterDataNode.__filter_dataframe_per_key_value(df_data, key, value, operator))
+
+        filtered_df_data = [
+            _FilterDataNode.__filter_dataframe_per_key_value(df_data, key, value, operator)
+            for key, value, operator in operators
+        ]
 
         return _FilterDataNode.__dataframe_merge(filtered_df_data, how) if filtered_df_data else pd.DataFrame()
 
     @staticmethod
-    def __filter_dataframe_per_key_value(
-        df_data: pd.DataFrame, key: str, value, operator: Operator
-    ):
+    def __filter_dataframe_per_key_value(df_data: pd.DataFrame, key: str, value, operator: Operator):
         df_by_col = df_data[key]
         if operator == Operator.EQUAL:
             df_by_col = df_by_col == value
@@ -171,9 +167,10 @@ class _FilterDataNode:
 
     @staticmethod
     def __filter_numpy_array(data: np.ndarray, operators: Union[List, Tuple], join_operator=JoinOperator.AND):
-        conditions = []
-        for key, value, operator in operators:
-            conditions.append(_FilterDataNode.__get_filter_condition_per_key_value(data, key, value, operator))
+        conditions = [
+            _FilterDataNode.__get_filter_condition_per_key_value(data, key, value, operator)
+            for key, value, operator in operators
+        ]
 
         if join_operator == JoinOperator.AND:
             join_conditions = reduce(and_, conditions)
@@ -206,11 +203,13 @@ class _FilterDataNode:
 
     @staticmethod
     def __filter_list(list_data: List, operators: Union[List, Tuple], join_operator=JoinOperator.AND):
-        filtered_list_data = []
-        for key, value, operator in operators:
-            filtered_list_data.append(_FilterDataNode.__filter_list_per_key_value(list_data, key, value, operator))
-        if len(filtered_list_data) == 0:
+        filtered_list_data = [
+            _FilterDataNode.__filter_list_per_key_value(list_data, key, value, operator)
+            for key, value, operator in operators
+        ]
+        if not filtered_list_data:
             return filtered_list_data
+
         if join_operator == JoinOperator.AND:
             return _FilterDataNode.__list_intersect(filtered_list_data)
         elif join_operator == JoinOperator.OR:

+ 1 - 1
taipy/core/data/aws_s3.py

@@ -104,7 +104,7 @@ class S3ObjectDataNode(DataNode):
         required = self._REQUIRED_PROPERTIES
         if missing := set(required) - set(properties.keys()):
             raise MissingRequiredProperty(
-                f"The following properties " f"{', '.join(x for x in missing)} were not informed and are required."
+                f"The following properties {', '.join(missing)} were not informed and are required."
             )
         super().__init__(
             config_id,

+ 2 - 6
taipy/core/data/csv.py

@@ -182,16 +182,12 @@ class CSVDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
 
     def _read_as(self):
         with open(self._path, encoding=self.properties[self.__ENCODING_KEY]) as csvFile:
-            res = list()
             if self.properties[self._HAS_HEADER_PROPERTY]:
                 reader = csv.DictReader(csvFile)
-                for line in reader:
-                    res.append(self._decoder(line))
             else:
                 reader = csv.reader(csvFile)
-                for line in reader:
-                    res.append(self._decoder(line))
-            return res
+
+            return [self._decoder(line) for line in reader]
 
     def _read_as_numpy(self) -> np.ndarray:
         return self._read_as_pandas_dataframe().to_numpy()

+ 7 - 12
taipy/core/data/data_node.py

@@ -117,7 +117,7 @@ class DataNode(_Entity, _Labeled):
         self._editor_expiration_date: Optional[datetime] = editor_expiration_date
 
         # Track edits
-        self._edits = edits or list()
+        self._edits = edits or []
 
         self._properties = _Properties(self, **kwargs)
 
@@ -153,9 +153,7 @@ class DataNode(_Entity, _Labeled):
         Returns:
             None if there has been no `Edit^` on this data node.
         """
-        if self._edits:
-            return self._edits[-1]
-        return None
+        return self._edits[-1] if self._edits else None
 
     @property  # type: ignore
     @_self_reload(_MANAGER_NAME)
@@ -380,10 +378,7 @@ class DataNode(_Entity, _Labeled):
             options (dict[str, any)): track `timestamp`, `comments`, `job_id`. The others are user-custom, users can
                 use options to attach any information to an external edit of a data node.
         """
-        edit = {}
-        for k, v in options.items():
-            if v is not None:
-                edit[k] = v
+        edit = {k: v for k, v in options.items() if v is not None}
         if "timestamp" not in edit:
             edit["timestamp"] = datetime.now()
         self.last_edit_date = edit.get("timestamp")
@@ -429,10 +424,10 @@ class DataNode(_Entity, _Labeled):
             and self.editor_expiration_date > datetime.now()
         ):
             raise DataNodeIsBeingEdited(self.id, self._editor_id)
-        else:
-            self.editor_id = None  # type: ignore
-            self.editor_expiration_date = None  # type: ignore
-            self.edit_in_progress = False  # type: ignore
+
+        self.editor_id = None
+        self.editor_expiration_date = None
+        self.edit_in_progress = False
 
     def filter(self, operators: Union[List, Tuple], join_operator=JoinOperator.AND):
         """Read and filter the data referenced by this data node.

+ 2 - 5
taipy/core/data/excel.py

@@ -235,9 +235,7 @@ class ExcelDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
                             work_books[sheet_name] = self._read_as_pandas_dataframe(sheet_name)
                         continue
 
-                res = list()
-                for row in work_sheet.rows:
-                    res.append([col.value for col in row])
+                res = [[col.value for col in row] for row in work_sheet.rows]
                 if self.properties[self._HAS_HEADER_PROPERTY] and res:
                     header = res.pop(0)
                     for i, row in enumerate(res):
@@ -322,8 +320,7 @@ class ExcelDataNode(DataNode, _AbstractFileDataNode, _AbstractTabularDataNode):
             self.__append_excel_with_single_sheet(pd.DataFrame(data).to_excel, index=False, header=False)
 
     def __write_excel_with_single_sheet(self, write_excel_fct, *args, **kwargs):
-        sheet_name = self.properties.get(self.__SHEET_NAME_PROPERTY)
-        if sheet_name:
+        if sheet_name := self.properties.get(self.__SHEET_NAME_PROPERTY):
             if not isinstance(sheet_name, str):
                 if len(sheet_name) > 1:
                     raise SheetNameLengthMismatch

+ 3 - 3
taipy/core/data/generic.py

@@ -81,14 +81,14 @@ class GenericDataNode(DataNode):
             properties = {}
         if missing := set(self._REQUIRED_PROPERTIES) - set(properties.keys()):
             raise MissingRequiredProperty(
-                f"The following properties " f"{', '.join(x for x in missing)} were not informed and are required."
+                f"The following properties {', '.join(missing)} were not informed and are required."
             )
 
         missing_optional_fcts = set(self._REQUIRED_AT_LEAST_ONE_PROPERTY) - set(properties.keys())
         if len(missing_optional_fcts) == len(self._REQUIRED_AT_LEAST_ONE_PROPERTY):
             raise MissingRequiredProperty(
-                f"None of the following properties "
-                f"{', '.join(x for x in missing)} were informed and at least one must be populated."
+                f"None of the following properties {', '.join(missing_optional_fcts)} were informed "
+                "and at least one must be populated."
             )
         for missing_optional_fct in missing_optional_fcts:
             properties[missing_optional_fct] = None

+ 1 - 1
taipy/core/data/mongo.py

@@ -110,7 +110,7 @@ class MongoCollectionDataNode(DataNode):
         required = self._REQUIRED_PROPERTIES
         if missing := set(required) - set(properties.keys()):
             raise MissingRequiredProperty(
-                f"The following properties " f"{', '.join(x for x in missing)} were not informed and are required."
+                f"The following properties {', '.join(missing)} were not informed and are required."
             )
 
         self._check_custom_document(properties[self._CUSTOM_DOCUMENT_PROPERTY])

+ 1 - 3
taipy/core/job/_job_manager.py

@@ -88,6 +88,4 @@ class _JobManager(_Manager[Job], _VersionMixin):
     def _is_deletable(cls, job: Union[Job, JobId]) -> bool:
         if isinstance(job, str):
             job = cls._get(job)
-        if not job.is_finished():
-            return False
-        return True
+        return job.is_finished()

+ 1 - 11
taipy/core/notification/_topic.py

@@ -23,20 +23,10 @@ class _Topic:
         operation: Optional[EventOperation] = None,
         attribute_name: Optional[str] = None,
     ):
-
         self.entity_type = entity_type
         self.entity_id = entity_id
         self.operation = self.__preprocess_operation(operation, self.entity_type)
-        self.attribute_name = self.__preprocess_attribute_name(attribute_name, self.operation)
-
-    @classmethod
-    def __preprocess_attribute_name(
-        cls, attribute_name: Optional[str] = None, operation: Optional[EventOperation] = None
-    ) -> Optional[str]:
-
-        # if operation in _NO_ATTRIBUTE_NAME_OPERATIONS and attribute_name is not None:
-        #     raise InvalidEventAttributeName
-        return attribute_name
+        self.attribute_name = attribute_name
 
     @classmethod
     def __preprocess_operation(

+ 11 - 20
taipy/core/scenario/_scenario_manager.py

@@ -142,7 +142,7 @@ class _ScenarioManager(_Manager[Scenario], _VersionMixin):
                     sequence_tasks.append(task)
                 else:
                     non_existing_sequence_task_config_in_scenario_config.add(sequence_task_config.id)
-            if len(non_existing_sequence_task_config_in_scenario_config) > 0:
+            if non_existing_sequence_task_config_in_scenario_config:
                 raise SequenceTaskConfigDoesNotExistInSameScenarioConfig(
                     list(non_existing_sequence_task_config_in_scenario_config), sequence_name, str(config.id)
                 )
@@ -254,11 +254,7 @@ class _ScenarioManager(_Manager[Scenario], _VersionMixin):
 
     @classmethod
     def _get_all_by_tag(cls, tag: str) -> List[Scenario]:
-        scenarios = []
-        for scenario in cls._get_all():
-            if scenario.has_tag(tag):
-                scenarios.append(scenario)
-        return scenarios
+        return [scenario for scenario in cls._get_all() if scenario.has_tag(tag)]
 
     @classmethod
     def _get_all_by_cycle(cls, cycle: Cycle) -> List[Scenario]:
@@ -272,11 +268,7 @@ class _ScenarioManager(_Manager[Scenario], _VersionMixin):
 
     @classmethod
     def _get_primary_scenarios(cls) -> List[Scenario]:
-        primary_scenarios = []
-        for scenario in cls._get_all():
-            if scenario.is_primary:
-                primary_scenarios.append(scenario)
-        return primary_scenarios
+        return [scenario for scenario in cls._get_all() if scenario.is_primary]
 
     @classmethod
     def _is_promotable_to_primary(cls, scenario: Union[Scenario, ScenarioId]) -> bool:
@@ -288,25 +280,24 @@ class _ScenarioManager(_Manager[Scenario], _VersionMixin):
 
     @classmethod
     def _set_primary(cls, scenario: Scenario):
-        if scenario.cycle:
-            primary_scenario = cls._get_primary(scenario.cycle)
-            # To prevent SAME scenario updating out of Context Manager
-            if primary_scenario and primary_scenario != scenario:
-                primary_scenario.is_primary = False  # type: ignore
-            scenario.is_primary = True  # type: ignore
-        else:
+        if not scenario.cycle:
             raise DoesNotBelongToACycle(
                 f"Can't set scenario {scenario.id} to primary because it doesn't belong to a cycle."
             )
 
+        primary_scenario = cls._get_primary(scenario.cycle)
+        # To prevent SAME scenario updating out of Context Manager
+        if primary_scenario and primary_scenario != scenario:
+            primary_scenario.is_primary = False  # type: ignore
+        scenario.is_primary = True  # type: ignore
+
     @classmethod
     def _tag(cls, scenario: Scenario, tag: str):
         tags = scenario.properties.get(cls._AUTHORIZED_TAGS_KEY, set())
         if len(tags) > 0 and tag not in tags:
             raise UnauthorizedTagError(f"Tag `{tag}` not authorized by scenario configuration `{scenario.config_id}`")
         if scenario.cycle:
-            old_tagged_scenario = cls._get_by_tag(scenario.cycle, tag)
-            if old_tagged_scenario:
+            if old_tagged_scenario := cls._get_by_tag(scenario.cycle, tag):
                 old_tagged_scenario.remove_tag(tag)
                 cls._set(old_tagged_scenario)
         scenario._add_tag(tag)

+ 5 - 6
taipy/core/sequence/_sequence_manager.py

@@ -151,13 +151,12 @@ class _SequenceManager(_Manager[Sequence], _VersionMixin):
         task_manager = _TaskManagerFactory._build_manager()
         _tasks: List[Task] = []
         for task in tasks:
-            if not isinstance(task, Task):
-                if _task := task_manager._get(task):
-                    _tasks.append(_task)
-                else:
-                    raise NonExistingTask(task)
-            else:
+            if isinstance(task, Task):
                 _tasks.append(task)
+            elif _task := task_manager._get(task):
+                _tasks.append(_task)
+            else:
+                raise NonExistingTask(task)
 
         properties = properties if properties else {}
         properties["name"] = sequence_name

+ 2 - 6
taipy/core/submission/submission.py

@@ -123,13 +123,8 @@ class Submission(_Entity, _Labeled):
     def jobs(self) -> List[Job]:
         from ..job._job_manager_factory import _JobManagerFactory
 
-        jobs = []
         job_manager = _JobManagerFactory._build_manager()
-
-        for job in self._jobs:
-            jobs.append(job_manager._get(job))
-
-        return jobs
+        return [job_manager._get(job) for job in self._jobs]
 
     @jobs.setter  # type: ignore
     @_self_setter(_MANAGER_NAME)
@@ -196,6 +191,7 @@ class Submission(_Entity, _Labeled):
 
     def _update_submission_status(self, job: Job):
         from ._submission_manager_factory import _SubmissionManagerFactory
+
         with self.lock:
             submission_manager = _SubmissionManagerFactory._build_manager()
             submission = submission_manager._get(self)

+ 1 - 2
taipy/core/task/task.py

@@ -165,8 +165,7 @@ class Task(_Entity, _Labeled):
                 either no input or no output.
         """
         data_nodes = list(self.__input.values()) + list(self.__output.values())
-        scope = Scope(min(dn.scope for dn in data_nodes)) if len(data_nodes) != 0 else Scope.GLOBAL
-        return scope
+        return Scope(min(dn.scope for dn in data_nodes)) if len(data_nodes) != 0 else Scope.GLOBAL
 
     @property
     def version(self):

+ 2 - 4
taipy/gui/_renderers/factory.py

@@ -605,8 +605,7 @@ class _Factory:
             for lib in _Factory.__LIBRARIES.get(parts[0], []):
                 elts = lib.get_elements()
                 if isinstance(elts, dict):
-                    element = elts.get(element_name)
-                    if element:
+                    if element := elts.get(element_name):
                         return lib, element_name, element
         else:
             element_name = name
@@ -614,8 +613,7 @@ class _Factory:
                 for lib in libs:
                     elts = lib.get_elements()
                     if isinstance(elts, dict):
-                        element = elts.get(element_name)
-                        if element:
+                        if element := elts.get(element_name):
                             return lib, element_name, element
         return None, None, None
 

+ 1 - 4
taipy/gui/extension/library.py

@@ -172,10 +172,7 @@ class Element:
             xhtml = self._render_xhtml(attributes)
             try:
                 xml_root = etree.fromstring(xhtml)
-                if is_html:
-                    return xhtml, name
-                else:
-                    return xml_root
+                return (xhtml, name) if is_html else xml_root
 
             except Exception as e:
                 _warn(f"{name}.render_xhtml() did not return a valid XHTML string", e)

+ 2 - 2
taipy/gui/gui.py

@@ -2031,7 +2031,7 @@ class Gui:
     def _call_on_exception(self, function_name: str, exception: Exception) -> bool:
         if hasattr(self, "on_exception") and callable(self.on_exception):
             try:
-                self.on_exception(self.__get_state(), str(function_name), exception)
+                self.on_exception(self.__get_state(), function_name, exception)
             except Exception as e:  # pragma: no cover
                 _warn("Exception raised in on_exception()", e)
             return True
@@ -2118,7 +2118,7 @@ class Gui:
                 to=page_name,
                 params={
                     _Server._RESOURCE_HANDLER_ARG: pr._resource_handler.get_id(),
-                    _Server._CUSTOM_PAGE_META_ARG: json.dumps(pr._metadata, cls=_TaipyJsonEncoder)
+                    _Server._CUSTOM_PAGE_META_ARG: json.dumps(pr._metadata, cls=_TaipyJsonEncoder),
                 },
             ):
                 # Proactively handle the bindings of custom page variables

+ 20 - 18
taipy/gui/utils/_evaluator.py

@@ -240,24 +240,26 @@ class _Evaluator:
         This function will execute when the __request_var_update function receive a refresh order
         """
         expr = self.__hash_to_expr.get(var_name)
-        if expr:
-            expr_decoded, _ = _variable_decode(expr)
-            var_map = self.__expr_to_var_map.get(expr, {})
-            eval_dict = {k: _getscopeattr_drill(gui, gui._bind_var(v)) for k, v in var_map.items()}
-            if self._is_expression(expr_decoded):
-                expr_string = 'f"' + _variable_decode(expr)[0].replace('"', '\\"') + '"'
-            else:
-                expr_string = expr_decoded
-            try:
-                ctx: t.Dict[str, t.Any] = {}
-                ctx.update(self.__global_ctx)
-                ctx.update(eval_dict)
-                expr_evaluated = eval(expr_string, ctx)
-                _setscopeattr(gui, var_name, expr_evaluated)
-                if holder is not None:
-                    holder.set(expr_evaluated)
-            except Exception as e:
-                _warn(f"Exception raised evaluating {expr_string}", e)
+        if not expr:
+            return
+
+        expr_decoded, _ = _variable_decode(expr)
+        var_map = self.__expr_to_var_map.get(expr, {})
+        eval_dict = {k: _getscopeattr_drill(gui, gui._bind_var(v)) for k, v in var_map.items()}
+        if self._is_expression(expr_decoded):
+            expr_string = 'f"' + _variable_decode(expr)[0].replace('"', '\\"') + '"'
+        else:
+            expr_string = expr_decoded
+        try:
+            ctx: t.Dict[str, t.Any] = {}
+            ctx.update(self.__global_ctx)
+            ctx.update(eval_dict)
+            expr_evaluated = eval(expr_string, ctx)
+            _setscopeattr(gui, var_name, expr_evaluated)
+            if holder is not None:
+                holder.set(expr_evaluated)
+        except Exception as e:
+            _warn(f"Exception raised evaluating {expr_string}", e)
 
     def re_evaluate_expr(self, gui: Gui, var_name: str) -> t.Set[str]:
         """

+ 3 - 6
taipy/gui/utils/html.py

@@ -14,9 +14,6 @@ import typing as t
 
 def _get_css_var_value(value: t.Any) -> str:
     if isinstance(value, str):
-        if " " in value:
-            return f'"{value}"'
-        return value
-    if isinstance(value, int):
-        return f"{value}px"
-    return f"{value}"
+        return f'"{value}"' if " " in value else value
+
+    return f"{value}px" if isinstance(value, int) else f"{value}"

+ 4 - 7
taipy/gui_core/_adapters.py

@@ -59,8 +59,7 @@ class _GuiCoreScenarioAdapter(_TaipyBase):
         data = super().get()
         if isinstance(data, Scenario):
             try:
-                scenario = core_get(data.id)
-                if scenario:
+                if scenario := core_get(data.id):
                     return [
                         scenario.id,
                         scenario.is_primary,
@@ -116,9 +115,8 @@ class _GuiCoreScenarioDagAdapter(_TaipyBase):
         data = super().get()
         if isinstance(data, Scenario):
             try:
-                scenario = core_get(data.id)
-                if scenario:
-                    dag = data._get_dag()
+                if scenario := core_get(data.id):
+                    dag = scenario._get_dag()
                     nodes = dict()
                     for id, node in dag.nodes.items():
                         entityType = _GuiCoreScenarioDagAdapter.get_entity_type(node)
@@ -166,8 +164,7 @@ class _GuiCoreDatanodeAdapter(_TaipyBase):
         data = super().get()
         if isinstance(data, DataNode):
             try:
-                datanode = core_get(data.id)
-                if datanode:
+                if datanode := core_get(data.id):
                     owner = core_get(datanode.owner_id) if datanode.owner_id else None
                     return [
                         datanode.id,

+ 24 - 25
taipy/gui_core/_context.py

@@ -509,35 +509,35 @@ class _GuiCoreContext(CoreEventConsumerBase):
             if hasattr(data, "id") and is_readable(data.id) and core_get(data.id) is not None:
                 if isinstance(data, DataNode):
                     return (data.id, data.get_simple_label(), None, _EntityType.DATANODE.value, False)
-                else:
-                    with self.lock:
-                        self.__do_datanodes_tree()
-                        if self.data_nodes_by_owner:
-                            if isinstance(data, Cycle):
+
+                with self.lock:
+                    self.__do_datanodes_tree()
+                    if self.data_nodes_by_owner:
+                        if isinstance(data, Cycle):
+                            return (
+                                data.id,
+                                data.get_simple_label(),
+                                self.data_nodes_by_owner[data.id] + self.scenario_by_cycle.get(data, []),
+                                _EntityType.CYCLE.value,
+                                False,
+                            )
+                        elif isinstance(data, Scenario):
+                            return (
+                                data.id,
+                                data.get_simple_label(),
+                                self.data_nodes_by_owner[data.id] + list(data.sequences.values()),
+                                _EntityType.SCENARIO.value,
+                                data.is_primary,
+                            )
+                        elif isinstance(data, Sequence):
+                            if datanodes := self.data_nodes_by_owner.get(data.id):
                                 return (
                                     data.id,
                                     data.get_simple_label(),
-                                    self.data_nodes_by_owner[data.id] + self.scenario_by_cycle.get(data, []),
-                                    _EntityType.CYCLE.value,
+                                    datanodes,
+                                    _EntityType.SEQUENCE.value,
                                     False,
                                 )
-                            elif isinstance(data, Scenario):
-                                return (
-                                    data.id,
-                                    data.get_simple_label(),
-                                    self.data_nodes_by_owner[data.id] + list(data.sequences.values()),
-                                    _EntityType.SCENARIO.value,
-                                    data.is_primary,
-                                )
-                            elif isinstance(data, Sequence):
-                                if datanodes := self.data_nodes_by_owner.get(data.id):
-                                    return (
-                                        data.id,
-                                        data.get_simple_label(),
-                                        datanodes,
-                                        _EntityType.SEQUENCE.value,
-                                        False,
-                                    )
         except Exception as e:
             _warn(
                 f"Access to {type(data)} ({data.id if hasattr(data, 'id') else 'No_id'}) failed",
@@ -957,4 +957,3 @@ class _GuiCoreContext(CoreEventConsumerBase):
                     _warn(f"dag.on_action(): Exception raised in '{args[1]}()' with '{args[0]}'", e)
         elif args[1]:
             _warn(f"dag.on_action(): Invalid function '{args[1]}()'.")
-

+ 4 - 5
taipy/rest/api/resources/cycle.py

@@ -27,11 +27,10 @@ REPOSITORY = "cycle"
 
 
 def _get_or_raise(cycle_id: str) -> Cycle:
-    manager = _CycleManagerFactory._build_manager()
-    cycle = manager._get(cycle_id)
-    if not cycle:
-        raise NonExistingCycle(cycle_id)
-    return cycle
+    if cycle := _CycleManagerFactory._build_manager()._get(cycle_id):
+        return cycle
+
+    raise NonExistingCycle(cycle_id)
 
 
 class CycleResource(Resource):

+ 8 - 9
taipy/rest/api/resources/datanode.py

@@ -55,11 +55,10 @@ REPOSITORY = "data"
 
 
 def _get_or_raise(data_node_id: str) -> DataNode:
-    manager = _DataManagerFactory._build_manager()
-    data_node = manager._get(data_node_id)
-    if not data_node:
-        raise NonExistingDataNode(data_node_id)
-    return data_node
+    if data_node := _DataManagerFactory._build_manager()._get(data_node_id):
+        return data_node
+
+    raise NonExistingDataNode(data_node_id)
 
 
 class DataNodeResource(Resource):
@@ -459,10 +458,10 @@ class DataNodeList(Resource):
         self.logger = kwargs.get("logger")
 
     def fetch_config(self, config_id):
-        config = Config.data_nodes.get(config_id)
-        if not config:
-            raise NonExistingDataNodeConfig(config_id)
-        return config
+        if config := Config.data_nodes.get(config_id):
+            return config
+
+        raise NonExistingDataNodeConfig(config_id)
 
     @_middleware
     def get(self):

+ 8 - 9
taipy/rest/api/resources/job.py

@@ -27,11 +27,10 @@ from ..schemas import JobSchema
 
 
 def _get_or_raise(job_id: str) -> Job:
-    manager = _JobManagerFactory._build_manager()
-    job = manager._get(job_id)
-    if job is None:
-        raise NonExistingJob(job_id)
-    return job
+    if job := _JobManagerFactory._build_manager()._get(job_id):
+        return job
+
+    raise NonExistingJob(job_id)
 
 
 class JobResource(Resource):
@@ -198,10 +197,10 @@ class JobList(Resource):
         self.logger = kwargs.get("logger")
 
     def fetch_config(self, config_id):
-        config = Config.tasks.get(config_id)
-        if not config:
-            raise NonExistingTaskConfig(config_id)
-        return config
+        if config := Config.tasks.get(config_id):
+            return config
+
+        raise NonExistingTaskConfig(config_id)
 
     @_middleware
     def get(self):

+ 8 - 9
taipy/rest/api/resources/scenario.py

@@ -24,11 +24,10 @@ from ..schemas import ScenarioResponseSchema
 
 
 def _get_or_raise(scenario_id: str) -> Scenario:
-    manager = _ScenarioManagerFactory._build_manager()
-    scenario = manager._get(scenario_id)
-    if scenario is None:
-        raise NonExistingScenario(scenario_id)
-    return scenario
+    if scenario := _ScenarioManagerFactory._build_manager()._get(scenario_id):
+        return scenario
+
+    raise NonExistingScenario(scenario_id)
 
 
 REPOSITORY = "scenario"
@@ -415,10 +414,10 @@ class ScenarioList(Resource):
         self.logger = kwargs.get("logger")
 
     def fetch_config(self, config_id):
-        config = Config.scenarios.get(config_id)
-        if not config:
-            raise NonExistingScenarioConfig(config_id)
-        return config
+        if config := Config.scenarios.get(config_id):
+            return config
+
+        raise NonExistingScenarioConfig(config_id)
 
     @_middleware
     def get(self):

+ 4 - 5
taipy/rest/api/resources/sequence.py

@@ -25,11 +25,10 @@ from ..schemas import SequenceResponseSchema
 
 
 def _get_or_raise(sequence_id: str) -> Sequence:
-    manager = _SequenceManagerFactory._build_manager()
-    sequence = manager._get(sequence_id)
-    if sequence is None:
-        raise NonExistingSequence(sequence_id)
-    return sequence
+    if sequence := _SequenceManagerFactory._build_manager()._get(sequence_id):
+        return sequence
+
+    raise NonExistingSequence(sequence_id)
 
 
 REPOSITORY = "sequence"

+ 8 - 9
taipy/rest/api/resources/task.py

@@ -24,11 +24,10 @@ from ..schemas import TaskSchema
 
 
 def _get_or_raise(task_id: str) -> Task:
-    manager = _TaskManagerFactory._build_manager()
-    task = manager._get(task_id)
-    if task is None:
-        raise NonExistingTask(task_id)
-    return task
+    if task := _TaskManagerFactory._build_manager()._get(task_id):
+        return task
+
+    raise NonExistingTask(task_id)
 
 
 REPOSITORY = "task"
@@ -196,10 +195,10 @@ class TaskList(Resource):
         self.logger = kwargs.get("logger")
 
     def fetch_config(self, config_id):
-        config = Config.tasks.get(config_id)
-        if not config:
-            raise NonExistingTaskConfig(config_id)
-        return config
+        if config := Config.tasks.get(config_id):
+            return config
+
+        raise NonExistingTaskConfig(config_id)
 
     @_middleware
     def get(self):

+ 2 - 3
taipy/rest/commons/apispec.py

@@ -35,8 +35,7 @@ class FlaskRestfulPlugin(FlaskPlugin):
             raise APISpecError("Could not find endpoint for view {0}".format(view))
 
         # WARNING: Assume 1 rule per view function for now
-        rule = app.url_map._rules_by_endpoint[endpoint][0]
-        return rule
+        return app.url_map._rules_by_endpoint[endpoint][0]
 
 
 class APISpecExt:
@@ -63,7 +62,7 @@ class APISpecExt:
             version=app.config["APISPEC_VERSION"],
             openapi_version=app.config["OPENAPI_VERSION"],
             plugins=[MarshmallowPlugin(), FlaskRestfulPlugin()],
-            **kwargs
+            **kwargs,
         )
 
         blueprint = Blueprint(

+ 3 - 4
taipy/rest/commons/encoder.py

@@ -20,9 +20,8 @@ Json = Union[dict, list, str, int, float, bool, None]
 class _CustomEncoder(json.JSONEncoder):
     def default(self, o: Any) -> Json:
         if isinstance(o, Enum):
-            result = o.value
+            return o.value
         elif isinstance(o, datetime):
-            result = {"__type__": "Datetime", "__value__": o.isoformat()}
+            return {"__type__": "Datetime", "__value__": o.isoformat()}
         else:
-            result = json.JSONEncoder.default(self, o)
-        return result
+            return json.JSONEncoder.default(self, o)