Browse code

Merge branch 'develop' into feature/#398-expand-exposed-type-parameter

Jean-Robin, 5 months ago
Parent
Current commit
d6f49e31f5

+ 6 - 6
.github/PULL_REQUEST_TEMPLATE.md

@@ -32,9 +32,9 @@ _Describe which projects this change will impact and that needs to be backported
 ## Checklist
 _We encourage you to keep the code coverage percentage at 80% and above._
 
-- [ ] Does this solution meet the acceptance criteria of the related issue?
-- [ ] Is the related issue checklist completed?
-- [ ] Does this PR adds unit tests for the developed code? If not, why?
-- [ ] End-to-End tests have been added or updated?
-- [ ] Was the documentation updated, or a dedicated issue for documentation created? (If applicable)
-- [ ] Is the release notes updated? (If applicable)
+- [ ] This solution meets the acceptance criteria of the related issue.
+- [ ] The related issue checklist is completed.
+- [ ] This PR adds unit tests for the developed code. If not, why?
+- [ ] End-to-End tests have been added or updated. If not, why?
+- [ ] The documentation has been updated, or a dedicated issue created. (If applicable)
+- [ ] The release notes have been updated. (If applicable)

+ 0 - 59
taipy/common/config/checker/_checkers/_auth_config_checker.py

@@ -1,59 +0,0 @@
-# Copyright 2021-2024 Avaiga Private Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
-# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations under the License.
-
-from ..._config import _Config
-from ..issue_collector import IssueCollector
-from ._config_checker import _ConfigChecker
-
-
-class _AuthConfigChecker(_ConfigChecker):
-    def __init__(self, config: _Config, collector: IssueCollector):
-        super().__init__(config, collector)
-
-    def _check(self) -> IssueCollector:
-        auth_config = self._config._auth_config  # type: ignore
-        self._check_predefined_protocol(auth_config)
-        return self._collector
-
-    def _check_predefined_protocol(self, auth_config):
-        if auth_config.protocol == auth_config._PROTOCOL_LDAP:
-            self.__check_ldap(auth_config)
-        if auth_config.protocol == auth_config._PROTOCOL_TAIPY:
-            self.__check_taipy(auth_config)
-
-    def __check_taipy(self, auth_config):
-        if auth_config._TAIPY_ROLES not in auth_config.properties:
-            self._error(
-                "properties",
-                auth_config._LDAP_SERVER,
-                f"`{auth_config._LDAP_SERVER}` property must be populated when {auth_config._PROTOCOL_LDAP} is used.",
-            )
-        if auth_config._TAIPY_PWD not in auth_config.properties:
-            self._warning(
-                "properties",
-                auth_config._TAIPY_PWD,
-                f"`In order to protect authentication with passwords using {auth_config._PROTOCOL_TAIPY} protocol,"
-                f" {auth_config._TAIPY_PWD}` property can be populated.",
-            )
-
-    def __check_ldap(self, auth_config):
-        if auth_config._LDAP_SERVER not in auth_config.properties:
-            self._error(
-                "properties",
-                auth_config._LDAP_SERVER,
-                f"`{auth_config._LDAP_SERVER}` attribute must be populated when {auth_config._PROTOCOL_LDAP} is used.",
-            )
-        if auth_config._LDAP_BASE_DN not in auth_config.properties:
-            self._error(
-                "properties",
-                auth_config._LDAP_BASE_DN,
-                f"`{auth_config._LDAP_BASE_DN}` field must be populated when {auth_config._PROTOCOL_LDAP} is used.",
-            )

+ 3 - 0
taipy/core/_orchestrator/_dispatcher/_job_dispatcher.py

@@ -159,4 +159,7 @@ class _JobDispatcher(threading.Thread):
                 _TaipyLogger._get_logger().error(st)
             _JobManagerFactory._build_manager()._set(job)
         else:
+            for output in job.task.output.values():
+                output.track_edit(job_id=job.id)
+                output.unlock_edit()
             job.completed()

+ 1 - 1
taipy/core/_orchestrator/_dispatcher/_task_function_wrapper.py

@@ -65,7 +65,7 @@ class _TaskFunctionWrapper:
                 for res, dn in zip(_results, outputs):
                     try:
                         data_node = data_manager._get(dn.id)
-                        data_node.write(res, job_id=job_id)
+                        data_node._write(res)
                     except Exception as e:
                         logger.error("Error during write", exc_info=1)
                         exceptions.append(DataNodeWritingError(f"Error writing in datanode id {dn.id}: {e}"))

+ 2 - 2
taipy/core/data/data_node.py

@@ -461,8 +461,6 @@ class DataNode(_Entity, _Labeled):
             **kwargs (Any): Extra information to attach to the edit document
                 corresponding to this write.
         """
-        from ._data_manager_factory import _DataManagerFactory
-
         if (editor_id
             and self.edit_in_progress
             and self.editor_id != editor_id
@@ -471,6 +469,8 @@ class DataNode(_Entity, _Labeled):
         self._write(data)
         self.track_edit(job_id=job_id, editor_id=editor_id, comment=comment, **kwargs)
         self.unlock_edit()
+        from ._data_manager_factory import _DataManagerFactory
+
         _DataManagerFactory._build_manager()._set(self)
 
     def track_edit(self,

+ 17 - 3
tests/core/_orchestrator/_dispatcher/test_dispatcher__update_job_status.py

@@ -10,19 +10,25 @@
 # specific language governing permissions and limitations under the License.
 import traceback
 
-from taipy import Job, JobId, Status, Task
+from taipy import Job, JobId, Scope, Status, Task
 from taipy.core._orchestrator._dispatcher import _JobDispatcher
 from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
+from taipy.core.data import InMemoryDataNode
+from taipy.core.data.data_node_id import EDIT_JOB_ID_KEY, EDIT_TIMESTAMP_KEY
 from taipy.core.job._job_manager_factory import _JobManagerFactory
 from taipy.core.task._task_manager_factory import _TaskManagerFactory
 
 
 def nothing(*args):
-    pass
+    return 42
 
 
+def _error():
+    raise RuntimeError("Something bad has happened")
+
 def test_update_job_status_no_exception():
-    task = Task("config_id", {}, nothing)
+    output = InMemoryDataNode("data_node", scope=Scope.SCENARIO)
+    task = Task("config_id",  {}, nothing, output=[output])
     _TaskManagerFactory._build_manager()._set(task)
     job = Job(JobId("id"), task, "s_id", task.id)
     _JobManagerFactory._build_manager()._set(job)
@@ -31,6 +37,14 @@ def test_update_job_status_no_exception():
 
     assert job.status == Status.COMPLETED
     assert job.stacktrace == []
+    assert len(output.edits) == 1
+    assert len(output.edits[0]) == 2
+    assert output.edits[0][EDIT_JOB_ID_KEY] == job.id
+    assert output.edits[0][EDIT_TIMESTAMP_KEY] is not None
+    assert output.last_edit_date is not None
+    assert output.editor_id is None
+    assert output.editor_expiration_date is None
+    assert not output.edit_in_progress
 
 
 def test_update_job_status_with_one_exception():

+ 3 - 0
tests/core/job/test_job.py

@@ -73,9 +73,12 @@ def job(task, job_id):
 @pytest.fixture
 def replace_in_memory_write_fct():
     default_write = InMemoryDataNode.write
+    default__write = InMemoryDataNode._write
     InMemoryDataNode.write = _error
+    InMemoryDataNode._write = _error
     yield
     InMemoryDataNode.write = default_write
+    InMemoryDataNode._write = default__write
 
 
 def _foo():

+ 12 - 3
tests/core/notification/test_events_published.py

@@ -12,6 +12,9 @@
 from queue import SimpleQueue
 from typing import Any, Dict, List
 
+import pytest
+
+from taipy import Orchestrator
 from taipy.common.config import Config, Frequency
 from taipy.core import taipy as tp
 from taipy.core.job.status import Status
@@ -154,8 +157,10 @@ def test_events_published_for_writing_dn():
     assert snapshot.operation_collected.get(EventOperation.UPDATE, 0) == 5
     all_evts.stop()
 
-
-def test_events_published_for_scenario_submission():
+@pytest.mark.parametrize("standalone", [False, True])
+def test_events_published_for_scenario_submission(standalone):
+    if standalone:
+        Config.configure_job_executions(mode="standalone", max_nb_of_workers=2)
     input_config = Config.configure_data_node("the_input")
     output_config = Config.configure_data_node("the_output")
     task_config = Config.configure_task("the_task", identity, input=input_config, output=output_config)
@@ -176,7 +181,11 @@ def test_events_published_for_scenario_submission():
     # 1 submission update event for jobs
     # 3 submission update events (for status: PENDING, RUNNING and COMPLETED)
     # 1 submission update event for is_completed
-    scenario.submit()
+    if standalone:
+        Orchestrator().run()
+        scenario.submit(wait=True)
+    else:
+        scenario.submit()
     snapshot = all_evts.capture()
     assert len(snapshot.collected_events) == 18
     assert snapshot.entity_type_collected.get(EventEntityType.CYCLE, 0) == 0