فهرست منبع

- Replace "str" + "str" with f"str {str}"
- Remove some noqa: B009
- minor cleaning and formatting

jrobinAV 1 سال پیش
والد
کامیت
9bff1940c2

+ 2 - 3
taipy/core/_core_cli.py

@@ -113,6 +113,5 @@ class _CoreCLI:
     @classmethod
     def __add_taipy_prefix(cls, key: str):
         if key.startswith("--no-"):
-            return key[:5] + "taipy-" + key[5:]
-
-        return key[:2] + "taipy-" + key[2:]
+            return f"{key[:5]}taipy-{key[5:]}"
+        return f"{key[:2]}taipy-{key[2:]}"

+ 3 - 7
taipy/core/_entity/_dag.py

@@ -54,7 +54,7 @@ class _DAG:
         return self._edges
 
     def __compute_size(self) -> Tuple[int, int]:
-        return len(self._sorted_nodes), max([len(i) for i in self._sorted_nodes])
+        return len(self._sorted_nodes), max(len(i) for i in self._sorted_nodes)
 
     def __compute_grid_size(self) -> Tuple[int, int]:
         if self._width == 1:
@@ -65,8 +65,7 @@ class _DAG:
 
     def __compute_nodes(self) -> Dict[str, _Node]:
         nodes = {}
-        x = 0
-        for same_lvl_nodes in self._sorted_nodes:
+        for x, same_lvl_nodes in enumerate(self._sorted_nodes):
             lcl_wdt = len(same_lvl_nodes)
             is_max = lcl_wdt != self.width
             if self.width != 1:
@@ -81,10 +80,7 @@ class _DAG:
         return nodes
 
     def __compute_edges(self, dag) -> List[_Edge]:
-        edges = []
-        for edge in dag.edges():
-            edges.append(_Edge(self.nodes[edge[0].id], self.nodes[edge[1].id]))
-        return edges
+        return [_Edge(self.nodes[edge[0].id], self.nodes[edge[1].id]) for edge in dag.edges()]
 
     @staticmethod
     def __lcm(*integers) -> int:

+ 1 - 1
taipy/core/_entity/_entity.py

@@ -23,7 +23,7 @@ class _Entity:
 
     def __enter__(self):
         self._is_in_context = True
-        self._in_context_attributes_changed_collector = list()
+        self._in_context_attributes_changed_collector = []
         return self
 
     def __exit__(self, exc_type, exc_value, exc_traceback):

+ 5 - 11
taipy/core/_entity/_labeled.py

@@ -52,26 +52,20 @@ class _Labeled:
         return self.__LABEL_SEPARATOR.join(ls)
 
     def _get_explicit_label(self) -> Optional[str]:
-        if hasattr(self, "_properties"):
-            return getattr(self, "_properties").get("label")  # noqa: B009
-        return None
+        return self._properties.get("label") if hasattr(self, "_properties") else None
 
     def _get_owner_id(self) -> Optional[str]:
-        if hasattr(self, "owner_id"):
-            return getattr(self, "owner_id")  # noqa: B009
-        return None
+        return self.owner_id if hasattr(self, "owner_id") else None
 
     def _get_name(self) -> Optional[str]:
         if hasattr(self, "name"):
-            return getattr(self, "name")  # noqa: B009
+            return self.name
         if hasattr(self, "_properties"):
-            return getattr(self, "_properties").get("name")  # noqa: B009
+            return self._properties.get("name")
         return None
 
     def _get_config_id(self) -> Optional[str]:
-        if hasattr(self, "config_id"):
-            return getattr(self, "config_id")  # noqa: B009
-        return None
+        return self.config_id if hasattr(self, "config_id") else None
 
     def _generate_entity_label(self) -> str:
         if name := self._get_name():

+ 2 - 2
taipy/core/_entity/_migrate/_utils.py

@@ -60,8 +60,8 @@ def __search_parent_config(entity_id: str, config: Dict, entity_type: str) -> Li
     possible_parents = "TASK" if entity_type == "DATA_NODE" else "SCENARIO"
     data = config[possible_parents]
 
+    section_id = f"{entity_id}:SECTION"
     for _id, entity_data in data.items():
-        section_id = f"{entity_id}:SECTION"
         if entity_type == "DATANODE" and possible_parents == "TASK":
             if section_id in entity_data["input_ids"] or section_id in entity_data["output_ids"]:
                 parents.append(section_id)
@@ -281,7 +281,7 @@ def __migrate_entities(entity_type: str, data: Dict) -> Dict:
     _entities = {k: data[k] for k in data if entity_type in k}
 
     for k, v in _entities.items():
-        if entity_type in ["JOB", "VERSION"]:
+        if entity_type in {"JOB", "VERSION"}:
             v["data"] = migration_fct(v["data"])  # type: ignore
         else:
             v["data"] = migration_fct(v["data"], data)  # type: ignore

+ 1 - 1
taipy/core/_entity/_migrate_cli.py

@@ -78,7 +78,7 @@ class _MigrateCLI:
         if args.remove_backup:
             cls.__handle_remove_backup(repository_type, repository_args)
 
-        do_backup = False if args.skip_backup else True
+        do_backup = not args.skip_backup
         cls.__migrate_entities(repository_type, repository_args, do_backup)
         sys.exit(0)
 

+ 5 - 8
taipy/core/_entity/_reload.py

@@ -22,10 +22,10 @@ class _Reloader:
 
     _no_reload_context = False
 
-    def __new__(class_, *args, **kwargs):
-        if not isinstance(class_._instance, class_):
-            class_._instance = object.__new__(class_, *args, **kwargs)
-        return class_._instance
+    def __new__(cls, *args, **kwargs):
+        if not isinstance(cls._instance, cls):
+            cls._instance = object.__new__(cls, *args, **kwargs)
+        return cls._instance
 
     def _reload(self, manager: str, obj):
         if self._no_reload_context:
@@ -66,10 +66,7 @@ def _self_setter(manager):
         def _do_set_entity(self, *args, **kwargs):
             fct(self, *args, **kwargs)
             entity_manager = _get_manager(manager)
-            if len(args) == 1:
-                value = args[0]
-            else:
-                value = args
+            value = args[0] if len(args) == 1 else args
             event = _make_event(
                 self,
                 EventOperation.UPDATE,

+ 3 - 4
taipy/core/_entity/submittable.py

@@ -139,8 +139,7 @@ class Submittable:
     def _remove_subscriber(self, callback: Callable, params: Optional[List[Any]] = None):
         if params is not None:
             self._subscribers.remove(_Subscriber(callback, params))
-        else:
-            elem = [x for x in self._subscribers if x.callback == callback]
-            if not elem:
-                raise ValueError
+        elif elem := [x for x in self._subscribers if x.callback == callback]:
             self._subscribers.remove(elem[0])
+        else:
+            raise ValueError

+ 1 - 2
taipy/core/_orchestrator/_dispatcher/_task_function_wrapper.py

@@ -38,8 +38,7 @@ class _TaskFunctionWrapper:
     def execute(self, **kwargs):
         """Execute the wrapped function. If `config_as_string` is given, then it will be reapplied to the config."""
         try:
-            config_as_string = kwargs.pop("config_as_string", None)
-            if config_as_string:
+            if config_as_string := kwargs.pop("config_as_string", None):
                 Config._applied_config._update(_TomlSerializer()._deserialize(config_as_string))
                 Config.block_update()
 

+ 21 - 21
taipy/core/_orchestrator/_orchestrator.py

@@ -81,23 +81,22 @@ class _Orchestrator(_AbstractOrchestrator):
         tasks = submittable._get_sorted_tasks()
         with cls.lock:
             for ts in tasks:
-                for task in ts:
-                    jobs.append(
-                        cls._lock_dn_output_and_create_job(
-                            task,
-                            submission.id,
-                            submission.entity_id,
-                            callbacks=itertools.chain([cls._update_submission_status], callbacks or []),
-                            force=force,  # type: ignore
-                        )
+                jobs.extend(
+                    cls._lock_dn_output_and_create_job(
+                        task,
+                        submission.id,
+                        submission.entity_id,
+                        callbacks=itertools.chain([cls._update_submission_status], callbacks or []),
+                        force=force,  # type: ignore
                     )
+                    for task in ts
+                )
         submission.jobs = jobs  # type: ignore
         cls._orchestrate_job_to_run_or_block(jobs)
         if Config.job_config.is_development:
             cls._check_and_execute_jobs_if_development_mode()
-        else:
-            if wait:
-                cls._wait_until_job_finished(jobs, timeout=timeout or 0)
+        elif wait:
+            cls._wait_until_job_finished(jobs, timeout=timeout or 0)
         return submission
 
     @classmethod
@@ -158,11 +157,14 @@ class _Orchestrator(_AbstractOrchestrator):
     ) -> Job:
         for dn in task.output.values():
             dn.lock_edit()
-        job = _JobManagerFactory._build_manager()._create(
-            task, itertools.chain([cls._on_status_change], callbacks or []), submit_id, submit_entity_id, force=force
+        return _JobManagerFactory._build_manager()._create(
+            task,
+            itertools.chain([cls._on_status_change], callbacks or []),
+            submit_id,
+            submit_entity_id,
+            force=force
         )
 
-        return job
 
     @classmethod
     def _update_submission_status(cls, job: Job):
@@ -197,7 +199,7 @@ class _Orchestrator(_AbstractOrchestrator):
         while __check_if_timeout(start, timeout) and index < len(jobs):
             try:
                 if jobs[index]._is_finished():
-                    index = index + 1
+                    index += 1
                 else:
                     sleep(0.5)  # Limit CPU usage
             except Exception:
@@ -308,7 +310,6 @@ class _Orchestrator(_AbstractOrchestrator):
 
     @classmethod
     def _cancel_jobs(cls, job_id_to_cancel: JobId, jobs: Set[Job]):
-
         for job in jobs:
             if job.is_running():
                 cls.__logger.info(f"{job.id} is running and cannot be canceled.")
@@ -316,11 +317,10 @@ class _Orchestrator(_AbstractOrchestrator):
                 cls.__logger.info(f"{job.id} has already been completed and cannot be canceled.")
             elif job.is_skipped():
                 cls.__logger.info(f"{job.id} has already been skipped and cannot be canceled.")
+            elif job_id_to_cancel == job.id:
+                job.canceled()
             else:
-                if job_id_to_cancel == job.id:
-                    job.canceled()
-                else:
-                    job.abandoned()
+                job.abandoned()
 
     @staticmethod
     def _check_and_execute_jobs_if_development_mode():

+ 1 - 2
taipy/core/_repository/_base_taipy_model.py

@@ -24,8 +24,7 @@ class _BaseModel:
     __table__: Table
 
     def __iter__(self):
-        for attr, value in self.__dict__.items():
-            yield attr, value
+        yield from self.__dict__.items()
 
     def to_dict(self) -> Dict[str, Any]:
         model_dict = {**dataclasses.asdict(self)}  # type: ignore[call-overload]

+ 4 - 5
taipy/core/_repository/_encoder.py

@@ -27,14 +27,13 @@ class _Encoder(json.JSONEncoder):
 
     def default(self, o: Any):
         if isinstance(o, Enum):
-            result = o.value
+            return o.value
         elif isinstance(o, datetime):
-            result = {"__type__": "Datetime", "__value__": o.isoformat()}
+            return {"__type__": "Datetime", "__value__": o.isoformat()}
         elif isinstance(o, timedelta):
-            result = {"__type__": "Timedelta", "__value__": self._timedelta_to_str(o)}
+            return {"__type__": "Timedelta", "__value__": self._timedelta_to_str(o)}
         else:
-            result = json.JSONEncoder.default(self, o)
-        return result
+            return json.JSONEncoder.default(self, o)
 
 
 def dumps(d):

+ 2 - 6
taipy/core/_repository/_filesystem_repository.py

@@ -170,10 +170,7 @@ class _FileSystemRepository(_AbstractRepository[ModelType, Entity]):
     def _get_by_config_and_owner_id(
         self, config_id: str, owner_id: Optional[str], filters: Optional[List[Dict]] = None
     ) -> Optional[Entity]:
-        if not filters:
-            filters = [{}]
-        else:
-            filters = copy.deepcopy(filters)
+        filters = [{}] if not filters else copy.deepcopy(filters)
 
         if owner_id is not None:
             for fil in filters:
@@ -225,8 +222,7 @@ class _FileSystemRepository(_AbstractRepository[ModelType, Entity]):
         if isinstance(file_content, str):
             file_content = json.loads(file_content, cls=_Decoder)
         model = self.model_type.from_dict(file_content)
-        entity = self.converter._model_to_entity(model)
-        return entity
+        return self.converter._model_to_entity(model)
 
     def __filter_by(self, filepath: pathlib.Path, filters: Optional[List[Dict]]) -> Optional[Json]:
         if not filters: