瀏覽代碼

clean C416 rule for codestyle

ooooo 1 年之前
父節點
當前提交
cec2bd2378

+ 1 - 1
doc/gui/examples/charts/treemap-simple.py

@@ -21,7 +21,7 @@ fibonacci = [0, 1]
 for i in range(2, n_numbers):
     fibonacci.append(fibonacci[i - 1] + fibonacci[i - 2])
 
-data = {"index": [i for i in range(1, n_numbers + 1)], "fibonacci": fibonacci}
+data = {"index": list(range(1, n_numbers + 1)), "fibonacci": fibonacci}
 
 page = """
 # TreeMap - Simple

+ 0 - 1
pyproject.toml

@@ -31,7 +31,6 @@ ignore = [      # TODO: to be removed
     "C405",  # Unnecessary list literal - rewrite as a literal
     "C408",  # Unnecessary dict call - rewrite as a literal
     "C409",  # Unnecessary list passed to tuple() - rewrite as a tuple literal
-    "C416",  # Unnecessary `set` comprehension (rewrite using `set()`)
 ]
 
 # Allow fix for all enabled rules (when `--fix`) is provided.

+ 1 - 1
taipy/core/_entity/_dag.py

@@ -31,7 +31,7 @@ class _Edge:
 
 class _DAG:
     def __init__(self, dag: nx.DiGraph):
-        self._sorted_nodes = [nodes for nodes in nx.topological_generations(dag)]
+        self._sorted_nodes = list(nx.topological_generations(dag))
         self._length, self._width = self.__compute_size()
         self._grid_length, self._grid_width = self.__compute_grid_size()
         self._nodes = self.__compute_nodes()

+ 1 - 1
taipy/core/data/_abstract_sql.py

@@ -230,7 +230,7 @@ class _AbstractSQLDataNode(DataNode, _AbstractTabularDataNode):
 
             # On pandas 1.3.5 there's a bug that makes that the dataframe from sqlalchemy query is
             # created without headers
-            keys = [col for col in result.keys()]
+            keys = list(result.keys())
             if columns:
                 return pd.DataFrame(result, columns=keys)[columns]
             return pd.DataFrame(result, columns=keys)

+ 2 - 2
taipy/gui_core/_context.py

@@ -124,7 +124,7 @@ class _GuiCoreContext(CoreEventConsumerBase):
                     if sequence and hasattr(sequence, "parent_ids") and sequence.parent_ids:  # type: ignore
                         self.gui._broadcast(
                             _GuiCoreContext._CORE_CHANGED_NAME,
-                            {"scenario": [x for x in sequence.parent_ids]},  # type: ignore
+                            {"scenario": list(sequence.parent_ids)},  # type: ignore
                         )
             except Exception as e:
                 _warn(f"Access to sequence {event.entity_id} failed", e)
@@ -650,7 +650,7 @@ class _GuiCoreContext(CoreEventConsumerBase):
             if isinstance(ent, Scenario):
                 tags = data.get(_GuiCoreContext.__PROP_SCENARIO_TAGS)
                 if isinstance(tags, (list, tuple)):
-                    ent.tags = {t for t in tags}
+                    ent.tags = set(tags)
             name = data.get(_GuiCoreContext.__PROP_ENTITY_NAME)
             if isinstance(name, str):
                 if hasattr(ent, _GuiCoreContext.__PROP_ENTITY_NAME):

+ 1 - 1
tests/core/data/test_filter_data_node.py

@@ -244,7 +244,7 @@ def test_filter_by_get_item(default_data_frame):
     filtered_custom_dn = custom_dn["a"]
     assert isinstance(filtered_custom_dn, List)
     assert len(filtered_custom_dn) == 10
-    assert filtered_custom_dn == [i for i in range(10)]
+    assert filtered_custom_dn == list(range(10))
 
     filtered_custom_dn = custom_dn[0:5]
     assert isinstance(filtered_custom_dn, List)

+ 2 - 2
tests/core/data/test_generic_data_node.py

@@ -46,11 +46,11 @@ def read_fct_modify_data_node_name(data_node_id: DataNodeId, name: str):
 
 
 def reset_data():
-    TestGenericDataNode.data = [i for i in range(10)]
+    TestGenericDataNode.data = list(range(10))
 
 
 class TestGenericDataNode:
-    data = [i for i in range(10)]
+    data = list(range(10))
 
     def test_create(self):
         dn = GenericDataNode(

+ 7 - 5
tests/core/job/test_job_manager.py

@@ -411,10 +411,12 @@ def test_cancel_subsequent_jobs():
     assert_true_after_time(job_4.is_canceled)
     assert_true_after_time(job_5.is_abandoned)
     assert_true_after_time(job_6.is_abandoned)
-    assert_true_after_time(lambda: all(
-        not _OrchestratorFactory._orchestrator._is_blocked(job)
-        for job in [job_1, job_2, job_3, job_4, job_5, job_6]
-    ))
+    assert_true_after_time(
+        lambda: all(
+            not _OrchestratorFactory._orchestrator._is_blocked(job)
+            for job in [job_1, job_2, job_3, job_4, job_5, job_6]
+        )
+    )
     assert_true_after_time(lambda: _OrchestratorFactory._orchestrator.jobs_to_run.qsize() == 0)
 
 
@@ -474,7 +476,7 @@ def _create_task(function, nb_outputs=1, name=None):
     output_dn_configs = [
         Config.configure_data_node(f"output{i}", "pickle", Scope.SCENARIO, default_data=0) for i in range(nb_outputs)
     ]
-    _DataManager._bulk_get_or_create({cfg for cfg in output_dn_configs})
+    _DataManager._bulk_get_or_create(set(output_dn_configs))
     name = name or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
     task_config = Config.configure_task(
         id=name,

+ 1 - 1
tests/core/job/test_job_manager_with_sql_repo.py

@@ -245,7 +245,7 @@ def _create_task(function, nb_outputs=1, name=None):
     output_dn_configs = [
         Config.configure_data_node(f"output{i}", scope=Scope.SCENARIO, default_data=0) for i in range(nb_outputs)
     ]
-    _DataManager._bulk_get_or_create({cfg for cfg in output_dn_configs})
+    _DataManager._bulk_get_or_create(set(output_dn_configs))
     name = name or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
     task_config = Config.configure_task(
         id=name,