浏览代码

Remove setting and getting key properties as attributes

Toan Quach 9 月之前
父节点
当前提交
b7b4b1e414

+ 2 - 10
taipy/core/_entity/_properties.py

@@ -12,9 +12,7 @@
 from collections import UserDict
 
 from taipy.config.common._template_handler import _TemplateHandler as _tpl
-from taipy.config.common._validate_id import _validate_id
 
-from ..exceptions.exceptions import PropertyKeyAlreadyExisted
 from ..notification import EventOperation, Notifier, _make_event
 
 
@@ -28,15 +26,11 @@ class _Properties(UserDict):
         self._pending_deletions = set()
 
     def __setitem__(self, key, value):
+        super(_Properties, self).__setitem__(key, value)
+
         if hasattr(self, "_entity_owner"):
             from ... import core as tp
 
-            try:
-                self._entity_owner._get_attributes(_validate_id(key), key)
-                raise PropertyKeyAlreadyExisted(key)
-            except AttributeError:
-                super(_Properties, self).__setitem__(key, value)
-
             event = _make_event(
                 self._entity_owner,
                 EventOperation.UPDATE,
@@ -51,8 +45,6 @@ class _Properties(UserDict):
                     self._pending_deletions.remove(key)
                 self._pending_changes[key] = value
                 self._entity_owner._in_context_attributes_changed_collector.append(event)
-        else:
-            super(_Properties, self).__setitem__(key, value)
 
     def __getitem__(self, key):
         return _tpl._replace_templates(super(_Properties, self).__getitem__(key))

+ 1 - 25
taipy/core/cycle/cycle.py

@@ -14,14 +14,13 @@ import uuid
 from datetime import datetime
 from typing import Any, Dict, Optional
 
-from taipy.config.common._validate_id import _validate_id
 from taipy.config.common.frequency import Frequency
 
 from .._entity._entity import _Entity
 from .._entity._labeled import _Labeled
 from .._entity._properties import _Properties
 from .._entity._reload import _Reloader, _self_reload, _self_setter
-from ..exceptions.exceptions import AttributeKeyAlreadyExisted, _SuspiciousFileOperation
+from ..exceptions.exceptions import _SuspiciousFileOperation
 from ..notification.event import Event, EventEntityType, EventOperation, _make_event
 from .cycle_id import CycleId
 
@@ -123,8 +122,6 @@ class Cycle(_Entity, _Labeled):
         self.id = id or self._new_id(self._name)
         self._properties = _Properties(self, **properties)
 
-        self._init_done = True
-
     def _new_name(self, name: Optional[str] = None) -> str:
         if name:
             return name
@@ -215,27 +212,6 @@ class Cycle(_Entity, _Labeled):
 
         return CycleId(_get_valid_filename(Cycle.__SEPARATOR.join([Cycle._ID_PREFIX, name, str(uuid.uuid4())])))
 
-    def __setattr__(self, name: str, value: Any) -> None:
-        if self.__CHECK_INIT_DONE_ATTR_NAME not in dir(self) or name in dir(self):
-            return super().__setattr__(name, value)
-        else:
-            protected_attribute_name = _validate_id(name)
-            try:
-                if protected_attribute_name not in self._properties:
-                    raise AttributeError
-                raise AttributeKeyAlreadyExisted(name)
-            except AttributeError:
-                return super().__setattr__(name, value)
-
-    def _get_attributes(self, protected_attribute_name, attribute_name):
-        raise AttributeError
-
-    def __getattr__(self, attribute_name):
-        protected_attribute_name = attribute_name
-        if protected_attribute_name in self._properties:
-            return self._properties[protected_attribute_name]
-        raise AttributeError(f"{attribute_name} is not an attribute of cycle {self.id}")
-
     def __eq__(self, other):
         return isinstance(other, Cycle) and self.id == other.id
 

+ 1 - 19
taipy/core/data/data_node.py

@@ -29,7 +29,7 @@ from .._entity._ready_to_run_property import _ReadyToRunProperty
 from .._entity._reload import _Reloader, _self_reload, _self_setter
 from .._version._version_manager_factory import _VersionManagerFactory
 from ..common._warnings import _warn_deprecated
-from ..exceptions.exceptions import AttributeKeyAlreadyExisted, DataNodeIsBeingEdited, NoData
+from ..exceptions.exceptions import DataNodeIsBeingEdited, NoData
 from ..job.job_id import JobId
 from ..notification.event import Event, EventEntityType, EventOperation, _make_event
 from ..reason import DataNodeEditInProgress, DataNodeIsNotWritten
@@ -349,24 +349,6 @@ class DataNode(_Entity, _Labeled):
     def __setstate__(self, state):
         vars(self).update(state)
 
-    def __setattr__(self, name: str, value: Any) -> None:
-        if self.__CHECK_INIT_DONE_ATTR_NAME not in dir(self) or name in dir(self):
-            return super().__setattr__(name, value)
-        else:
-            protected_attribute_name = _validate_id(name)
-            if protected_attribute_name in self._properties:
-                raise AttributeKeyAlreadyExisted(name)
-            return super().__setattr__(name, value)
-
-    def _get_attributes(self, protected_attribute_name, attribute_name):
-        raise AttributeError
-
-    def __getattr__(self, attribute_name):
-        protected_attribute_name = _validate_id(attribute_name)
-        if protected_attribute_name in self._properties:
-            return self._properties[protected_attribute_name]
-        raise AttributeError(f"{attribute_name} is not an attribute of data node {self.id}")
-
     @classmethod
     def _get_last_modified_datetime(cls, path: Optional[str] = None) -> Optional[datetime]:
         if path and os.path.isfile(path):

+ 4 - 4
taipy/core/data/mongo.py

@@ -143,15 +143,15 @@ class MongoCollectionDataNode(DataNode):
             properties.get(self.__COLLECTION_KEY, "")
         ]
 
-        self.custom_mongo_document = properties[self._CUSTOM_DOCUMENT_PROPERTY]
+        self.custom_document = properties[self._CUSTOM_DOCUMENT_PROPERTY]
 
         self._decoder = self._default_decoder
-        custom_decoder = getattr(self.custom_mongo_document, "decode", None)
+        custom_decoder = getattr(self.custom_document, "decode", None)
         if callable(custom_decoder):
             self._decoder = custom_decoder
 
         self._encoder = self._default_encoder
-        custom_encoder = getattr(self.custom_mongo_document, "encode", None)
+        custom_encoder = getattr(self.custom_document, "encode", None)
         if callable(custom_encoder):
             self._encoder = custom_encoder
 
@@ -275,7 +275,7 @@ class MongoCollectionDataNode(DataNode):
         Returns:
             A custom document object.
         """
-        return self.custom_mongo_document(**document)
+        return self.custom_document(**document)
 
     def _default_encoder(self, document_object: Any) -> Dict:
         """Encode a custom document object to a dictionary for writing to MongoDB.

+ 0 - 7
taipy/core/exceptions/exceptions.py

@@ -390,10 +390,3 @@ class AttributeKeyAlreadyExisted(Exception):
 
     def __init__(self, key: str):
         self.message = f"Attribute key '{key}' already existed."
-
-
-class PropertyKeyAlreadyExisted(Exception):
-    """Raised when a property key already existed."""
-
-    def __init__(self, key: str):
-        self.message = f"Property key '{key}' already existed."

+ 3 - 14
taipy/core/scenario/scenario.py

@@ -16,7 +16,6 @@ from typing import Any, Callable, Dict, List, Optional, Set, Union
 
 import networkx as nx
 
-from taipy.config.common._template_handler import _TemplateHandler as _tpl
 from taipy.config.common._validate_id import _validate_id
 
 from .._entity._entity import _Entity
@@ -183,17 +182,14 @@ class Scenario(_Entity, Submittable, _Labeled):
         if self.__CHECK_INIT_DONE_ATTR_NAME not in dir(self) or name in dir(self):
             return super().__setattr__(name, value)
         else:
-            protected_attribute_name = _validate_id(name)
             try:
-                if protected_attribute_name not in self._properties and not self._get_attributes(
-                    protected_attribute_name, name
-                ):
-                    raise AttributeError
+                self.__getattr__(name)
                 raise AttributeKeyAlreadyExisted(name)
             except AttributeError:
                 return super().__setattr__(name, value)
 
-    def _get_attributes(self, protected_attribute_name, attribute_name) -> Union[Sequence, Task, DataNode]:
+    def __getattr__(self, attribute_name) -> Union[Sequence, Task, DataNode]:
+        protected_attribute_name = _validate_id(attribute_name)
         sequences = self._get_sequences()
         if protected_attribute_name in sequences:
             return sequences[protected_attribute_name]
@@ -206,13 +202,6 @@ class Scenario(_Entity, Submittable, _Labeled):
 
         raise AttributeError(f"{attribute_name} is not an attribute of scenario {self.id}")
 
-    def __getattr__(self, attribute_name):
-        protected_attribute_name = _validate_id(attribute_name)
-        if protected_attribute_name in self._properties:
-            return _tpl._replace_templates(self._properties[protected_attribute_name])
-
-        return self._get_attributes(protected_attribute_name, attribute_name)
-
     @property
     def config_id(self):
         return self._config_id

+ 3 - 14
taipy/core/sequence/sequence.py

@@ -15,7 +15,6 @@ from typing import Any, Callable, Dict, List, Optional, Set, Union
 
 import networkx as nx
 
-from taipy.config.common._template_handler import _TemplateHandler as _tpl
 from taipy.config.common._validate_id import _validate_id
 
 from .._entity._entity import _Entity
@@ -162,17 +161,14 @@ class Sequence(_Entity, Submittable, _Labeled):
         if self.__CHECK_INIT_DONE_ATTR_NAME not in dir(self) or name in dir(self):
             return super().__setattr__(name, value)
         else:
-            protected_attribute_name = _validate_id(name)
             try:
-                if protected_attribute_name not in self._properties and not self._get_attributes(
-                    protected_attribute_name, name
-                ):
-                    raise AttributeError
+                self.__getattr__(name)
                 raise AttributeKeyAlreadyExisted(name)
             except AttributeError:
                 return super().__setattr__(name, value)
 
-    def _get_attributes(self, protected_attribute_name, attribute_name) -> Union[Task, DataNode]:
+    def __getattr__(self, attribute_name):
+        protected_attribute_name = _validate_id(attribute_name)
         tasks = self._get_tasks()
         if protected_attribute_name in tasks:
             return tasks[protected_attribute_name]
@@ -183,13 +179,6 @@ class Sequence(_Entity, Submittable, _Labeled):
                 return task.output[protected_attribute_name]
         raise AttributeError(f"{attribute_name} is not an attribute of sequence {self.id}")
 
-    def __getattr__(self, attribute_name):
-        protected_attribute_name = _validate_id(attribute_name)
-        if protected_attribute_name in self._properties:
-            return _tpl._replace_templates(self._properties[protected_attribute_name])
-
-        return self._get_attributes(protected_attribute_name, attribute_name)
-
     @property  # type: ignore
     @_self_reload(_MANAGER_NAME)
     def tasks(self) -> Dict[str, Task]:

+ 3 - 13
taipy/core/task/task.py

@@ -12,7 +12,6 @@
 import uuid
 from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Union
 
-from taipy.config.common._template_handler import _TemplateHandler as _tpl
 from taipy.config.common._validate_id import _validate_id
 from taipy.config.common.scope import Scope
 
@@ -141,29 +140,20 @@ class Task(_Entity, _Labeled):
         if self.__CHECK_INIT_DONE_ATTR_NAME not in dir(self) or name in dir(self):
             return super().__setattr__(name, value)
         else:
-            protected_attribute_name = _validate_id(name)
             try:
-                if protected_attribute_name not in self._properties and not self._get_attributes(
-                    protected_attribute_name, name
-                ):
-                    raise AttributeError
+                self.__getattr__(name)
                 raise AttributeKeyAlreadyExisted(name)
             except AttributeError:
                 return super().__setattr__(name, value)
 
-    def _get_attributes(self, protected_attribute_name, attribute_name) -> DataNode:
+    def __getattr__(self, attribute_name):
+        protected_attribute_name = _validate_id(attribute_name)
         if protected_attribute_name in self.input:
             return self.input[protected_attribute_name]
         if protected_attribute_name in self.output:
             return self.output[protected_attribute_name]
         raise AttributeError(f"{attribute_name} is not an attribute of task {self.id}")
 
-    def __getattr__(self, attribute_name):
-        protected_attribute_name = _validate_id(attribute_name)
-        if protected_attribute_name in self._properties:
-            return _tpl._replace_templates(self._properties[protected_attribute_name])
-        return self._get_attributes(protected_attribute_name, attribute_name)
-
     @property
     def properties(self):
         self._properties = _Reloader()._reload(self._MANAGER_NAME, self)._properties

+ 3 - 33
tests/core/cycle/test_cycle.py

@@ -12,14 +12,11 @@
 import datetime
 from datetime import timedelta
 
-import pytest
-
 from taipy.config.common.frequency import Frequency
 from taipy.core import CycleId
 from taipy.core.cycle._cycle_manager import _CycleManager
 from taipy.core.cycle._cycle_manager_factory import _CycleManagerFactory
 from taipy.core.cycle.cycle import Cycle
-from taipy.core.exceptions import AttributeKeyAlreadyExisted
 from taipy.core.task.task import Task
 
 
@@ -53,7 +50,7 @@ def test_create_cycle_entity(current_datetime):
     assert cycle_1.creation_date == current_datetime
     assert cycle_1.start_date == current_datetime
     assert cycle_1.end_date == current_datetime
-    assert cycle_1.key == "value"
+    assert cycle_1.properties["key"] == "value"
     assert cycle_1.frequency == Frequency.DAILY
 
     cycle_2 = Cycle(Frequency.YEARLY, {}, current_datetime, current_datetime, current_datetime)
@@ -115,41 +112,14 @@ def test_add_property_to_scenario(current_datetime):
         name="foo",
     )
     assert cycle.properties == {"key": "value"}
-    assert cycle.key == "value"
+    assert cycle.properties["key"] == "value"
 
     cycle.properties["new_key"] = "new_value"
 
     assert cycle.properties == {"key": "value", "new_key": "new_value"}
-    assert cycle.key == "value"
-    assert cycle.new_key == "new_value"
-
-
-def test_get_set_property_and_attribute(current_datetime):
-    cycle_manager = _CycleManagerFactory()._build_manager()
-
-    cycle = Cycle(
-        Frequency.WEEKLY,
-        {"key": "value"},
-        current_datetime,
-        current_datetime,
-        current_datetime,
-        name="foo",
-    )
-    cycle_manager._set(cycle)
-
-    assert cycle.properties == {"key": "value"}
-
-    cycle.properties["new_key"] = "new_value"
-    cycle.another_key = "another_value"
-
-    assert cycle.key == "value"
-    assert cycle.new_key == "new_value"
-    assert cycle.another_key == "another_value"
+    assert cycle.properties["key"] == "value"
     assert cycle.properties["new_key"] == "new_value"
 
-    with pytest.raises(AttributeKeyAlreadyExisted):
-        cycle.key = "KeyAlreadyUsed"
-
 
 def test_auto_set_and_reload(current_datetime):
     cycle_1 = Cycle(

+ 1 - 1
tests/core/cycle/test_cycle_manager.py

@@ -91,7 +91,7 @@ def test_create_and_delete_cycle_entity(tmpdir):
     assert cycle_1.start_date is not None
     assert cycle_1.end_date is not None
     assert cycle_1.start_date < cycle_1.creation_date < cycle_1.end_date
-    assert cycle_1.key == "value"
+    assert cycle_1.properties["key"] == "value"
     assert cycle_1.frequency == Frequency.DAILY
 
     cycle_1_id = cycle_1.id

+ 4 - 4
tests/core/data/test_csv_data_node.py

@@ -65,8 +65,8 @@ class TestCSVDataNode:
         assert dn.job_ids == []
         assert not dn.is_ready_for_reading
         assert dn.path == default_path
-        assert dn.has_header is False
-        assert dn.exposed_type == "pandas"
+        assert dn.properties["has_header"] is False
+        assert dn.properties["exposed_type"] == "pandas"
 
         csv_dn_config = Config.configure_csv_data_node(
             id="foo", default_path=default_path, has_header=True, exposed_type=MyCustomObject
@@ -74,8 +74,8 @@ class TestCSVDataNode:
         dn = _DataManagerFactory._build_manager()._create_and_set(csv_dn_config, None, None)
         assert dn.storage_type() == "csv"
         assert dn.config_id == "foo"
-        assert dn.has_header is True
-        assert dn.exposed_type == MyCustomObject
+        assert dn.properties["has_header"] is True
+        assert dn.properties["exposed_type"] == MyCustomObject
 
         with pytest.raises(InvalidConfigurationId):
             CSVDataNode(

+ 2 - 2
tests/core/data/test_data_manager.py

@@ -303,14 +303,14 @@ class TestDataManager:
         assert csv_dn.config_id == "foo"
         assert isinstance(csv_dn, CSVDataNode)
         assert csv_dn._path == "path_from_config_file"
-        assert csv_dn.has_header
+        assert csv_dn.properties["has_header"]
 
         csv_dn_cfg = Config.configure_data_node(id="baz", storage_type="csv", path="bar", has_header=True)
         csv_dn = _DataManager._create_and_set(csv_dn_cfg, None, None)
         assert csv_dn.config_id == "baz"
         assert isinstance(csv_dn, CSVDataNode)
         assert csv_dn._path == "bar"
-        assert csv_dn.has_header
+        assert csv_dn.properties["has_header"]
 
     def test_get_if_not_exists(self):
         with pytest.raises(ModelNotFound):

+ 1 - 20
tests/core/data/test_data_node.py

@@ -25,7 +25,7 @@ from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data.data_node import DataNode
 from taipy.core.data.data_node_id import DataNodeId
 from taipy.core.data.in_memory import InMemoryDataNode
-from taipy.core.exceptions.exceptions import AttributeKeyAlreadyExisted, DataNodeIsBeingEdited, NoData
+from taipy.core.exceptions.exceptions import DataNodeIsBeingEdited, NoData
 from taipy.core.job.job_id import JobId
 from taipy.core.task.task import Task
 
@@ -119,24 +119,6 @@ class TestDataNode:
         with pytest.raises(InvalidConfigurationId):
             DataNode("foo bar")
 
-    def test_get_set_property_and_attribute(self):
-        dn_cfg = Config.configure_data_node("bar", key="value")
-        dn = _DataManager._create_and_set(dn_cfg, "", "")
-
-        assert "key" in dn.properties.keys()
-        assert dn.key == "value"
-
-        dn.properties["new_key"] = "new_value"
-        dn.another_key = "another_value"
-
-        assert dn.key == "value"
-        assert dn.new_key == "new_value"
-        assert dn.another_key == "another_value"
-        assert dn.properties["new_key"] == "new_value"
-
-        with pytest.raises(AttributeKeyAlreadyExisted):
-            dn.key = "KeyAlreadyUsed"
-
     def test_read_write(self):
         dn = FakeDataNode("foo_bar")
         with pytest.raises(NoData):
@@ -688,7 +670,6 @@ class TestDataNode:
             dn = _DataManager._bulk_get_or_create([dn_config])[dn_config]
             assert dn._properties.data["prop"] == "ENV[FOO]"
             assert dn.properties["prop"] == "bar"
-            assert dn.prop == "bar"
 
     def test_path_populated_with_config_default_path(self):
         dn_config = Config.configure_data_node("data_node", "pickle", default_path="foo.p")

+ 18 - 18
tests/core/data/test_excel_data_node.py

@@ -94,17 +94,17 @@ class TestExcelDataNode:
         assert dn.job_ids == []
         assert not dn.is_ready_for_reading
         assert dn.path == path
-        assert dn.has_header is False
-        assert dn.sheet_name == "Sheet1"
+        assert dn.properties["has_header"] is False
+        assert dn.properties["sheet_name"] == "Sheet1"
 
         excel_dn_config_1 = Config.configure_excel_data_node(
             id="baz", default_path=path, has_header=True, sheet_name="Sheet1", exposed_type=MyCustomObject
         )
         dn_1 = _DataManagerFactory._build_manager()._create_and_set(excel_dn_config_1, None, None)
         assert isinstance(dn_1, ExcelDataNode)
-        assert dn_1.has_header is True
-        assert dn_1.sheet_name == "Sheet1"
-        assert dn_1.exposed_type == MyCustomObject
+        assert dn_1.properties["has_header"] is True
+        assert dn_1.properties["sheet_name"] == "Sheet1"
+        assert dn_1.properties["exposed_type"] == MyCustomObject
 
         excel_dn_config_2 = Config.configure_excel_data_node(
             id="baz",
@@ -115,16 +115,16 @@ class TestExcelDataNode:
         )
         dn_2 = _DataManagerFactory._build_manager()._create_and_set(excel_dn_config_2, None, None)
         assert isinstance(dn_2, ExcelDataNode)
-        assert dn_2.sheet_name == sheet_names
-        assert dn_2.exposed_type == {"Sheet1": "pandas", "Sheet2": "numpy"}
+        assert dn_2.properties["sheet_name"] == sheet_names
+        assert dn_2.properties["exposed_type"] == {"Sheet1": "pandas", "Sheet2": "numpy"}
 
         excel_dn_config_3 = Config.configure_excel_data_node(
             id="baz", default_path=path, has_header=True, sheet_name=sheet_names, exposed_type=MyCustomObject
         )
         dn_3 = _DataManagerFactory._build_manager()._create_and_set(excel_dn_config_3, None, None)
         assert isinstance(dn_3, ExcelDataNode)
-        assert dn_3.sheet_name == sheet_names
-        assert dn_3.exposed_type == MyCustomObject
+        assert dn_3.properties["sheet_name"] == sheet_names
+        assert dn_3.properties["exposed_type"] == MyCustomObject
 
         excel_dn_config_4 = Config.configure_excel_data_node(
             id="baz",
@@ -135,8 +135,8 @@ class TestExcelDataNode:
         )
         dn_4 = _DataManagerFactory._build_manager()._create_and_set(excel_dn_config_4, None, None)
         assert isinstance(dn_4, ExcelDataNode)
-        assert dn_4.sheet_name == sheet_names
-        assert dn_4.exposed_type == {"Sheet1": MyCustomObject, "Sheet2": MyCustomObject2}
+        assert dn_4.properties["sheet_name"] == sheet_names
+        assert dn_4.properties["exposed_type"] == {"Sheet1": MyCustomObject, "Sheet2": MyCustomObject2}
 
     def test_get_user_properties(self, excel_file):
         dn_1 = ExcelDataNode("dn_1", Scope.SCENARIO, properties={"path": "data/node/path"})
@@ -204,7 +204,7 @@ class TestExcelDataNode:
             pathlib.Path(__file__).parent.resolve(), "data_sample/example_2.xlsx"
         )  # ["Sheet1", "Sheet2", "Sheet3"]
         dn = ExcelDataNode("foo", Scope.SCENARIO, properties={"default_path": path, "exposed_type": MyCustomObject1})
-        assert dn.exposed_type == MyCustomObject1
+        assert dn.properties["exposed_type"] == MyCustomObject1
         dn.read()
         dn.path = new_path
         dn.read()
@@ -214,7 +214,7 @@ class TestExcelDataNode:
             Scope.SCENARIO,
             properties={"default_path": path, "exposed_type": MyCustomObject1, "sheet_name": ["Sheet4"]},
         )
-        assert dn.exposed_type == MyCustomObject1
+        assert dn.properties["exposed_type"] == MyCustomObject1
         with pytest.raises(NonExistingExcelSheet):
             dn.read()
 
@@ -264,14 +264,14 @@ class TestExcelDataNode:
             "foo", Scope.SCENARIO, properties={"default_path": "notexistyet.xlsx", "exposed_type": MyCustomObject1}
         )
         assert dn.path == "notexistyet.xlsx"
-        assert dn.exposed_type == MyCustomObject1
+        assert dn.properties["exposed_type"] == MyCustomObject1
         dn = ExcelDataNode(
             "foo",
             Scope.SCENARIO,
             properties={"default_path": "notexistyet.xlsx", "exposed_type": [MyCustomObject1, MyCustomObject2]},
         )
         assert dn.path == "notexistyet.xlsx"
-        assert dn.exposed_type == [MyCustomObject1, MyCustomObject2]
+        assert dn.properties["exposed_type"] == [MyCustomObject1, MyCustomObject2]
         dn = ExcelDataNode(
             "foo",
             Scope.SCENARIO,
@@ -281,12 +281,12 @@ class TestExcelDataNode:
             },
         )
         assert dn.path == "notexistyet.xlsx"
-        assert dn.exposed_type == {"Sheet1": MyCustomObject1, "Sheet2": MyCustomObject2}
+        assert dn.properties["exposed_type"] == {"Sheet1": MyCustomObject1, "Sheet2": MyCustomObject2}
 
     def test_exposed_type_default(self):
         path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.xlsx")
         dn = ExcelDataNode("foo", Scope.SCENARIO, properties={"default_path": path, "sheet_name": "Sheet1"})
-        assert dn.exposed_type == "pandas"
+        assert dn.properties["exposed_type"] == "pandas"
         data = dn.read()
         assert isinstance(data, pd.DataFrame)
 
@@ -295,7 +295,7 @@ class TestExcelDataNode:
         dn = ExcelDataNode(
             "foo", Scope.SCENARIO, properties={"default_path": path, "exposed_type": "pandas", "sheet_name": "Sheet1"}
         )
-        assert dn.exposed_type == "pandas"
+        assert dn.properties["exposed_type"] == "pandas"
         data = dn.read()
         assert isinstance(data, pd.DataFrame)
 

+ 0 - 1
tests/core/data/test_json_data_node.py

@@ -108,7 +108,6 @@ class TestJSONDataNode:
         assert isinstance(dn_2, JSONDataNode)
         assert dn_2.storage_type() == "json"
         assert dn_2.properties["encoding"] == "utf-16"
-        assert dn_2.encoding == "utf-16"
 
         json_dn_config_3 = Config.configure_json_data_node(
             id="foo", default_path=path, encoder=MyCustomEncoder, decoder=MyCustomDecoder

+ 1 - 1
tests/core/data/test_mongo_data_node.py

@@ -88,7 +88,7 @@ class TestMongoCollectionDataNode:
         assert mongo_dn.owner_id is None
         assert mongo_dn.job_ids == []
         assert mongo_dn.is_ready_for_reading
-        assert mongo_dn.custom_mongo_document == MongoDefaultDocument
+        assert mongo_dn.custom_document == MongoDefaultDocument
 
     @pytest.mark.parametrize("properties", __properties)
     def test_get_user_properties(self, properties):

+ 4 - 4
tests/core/data/test_parquet_data_node.py

@@ -84,16 +84,16 @@ class TestParquetDataNode:
         assert dn.job_ids == []
         assert not dn.is_ready_for_reading
         assert dn.path == path
-        assert dn.exposed_type == "pandas"
-        assert dn.compression == "snappy"
-        assert dn.engine == "pyarrow"
+        assert dn.properties["exposed_type"] == "pandas"
+        assert dn.properties["compression"] == "snappy"
+        assert dn.properties["engine"] == "pyarrow"
 
         parquet_dn_config_1 = Config.configure_parquet_data_node(
             id="bar", default_path=path, compression=compression, exposed_type=MyCustomObject
         )
         dn_1 = _DataManagerFactory._build_manager()._create_and_set(parquet_dn_config_1, None, None)
         assert isinstance(dn_1, ParquetDataNode)
-        assert dn_1.exposed_type == MyCustomObject
+        assert dn_1.properties["exposed_type"] == MyCustomObject
 
         with pytest.raises(InvalidConfigurationId):
             dn = ParquetDataNode("foo bar", Scope.SCENARIO, properties={"path": path, "name": "super name"})

+ 1 - 1
tests/core/data/test_read_excel_data_node.py

@@ -584,7 +584,7 @@ def test_read_multi_sheet_without_header_single_custom_object_exposed_type():
     )
 
     data_custom = excel_data_node_as_custom_object.read()
-    assert excel_data_node_as_custom_object.exposed_type == MyCustomObject1
+    assert excel_data_node_as_custom_object.properties["exposed_type"] == MyCustomObject1
     assert isinstance(data_custom, Dict)
     assert len(data_custom) == 2
     assert all(len(data_custom[sheet_name]) == 6 for sheet_name in sheet_names)

+ 5 - 5
tests/core/data/test_sql_data_node.py

@@ -119,9 +119,9 @@ class TestSQLDataNode:
         assert dn.owner_id is None
         assert dn.job_ids == []
         assert dn.is_ready_for_reading
-        assert dn.exposed_type == "pandas"
-        assert dn.read_query == "SELECT * FROM example"
-        assert dn.write_query_builder == my_write_query_builder_with_pandas
+        assert dn.properties["exposed_type"] == "pandas"
+        assert dn.properties["read_query"] == "SELECT * FROM example"
+        assert dn.properties["write_query_builder"] == my_write_query_builder_with_pandas
 
         sql_dn_config_1 = Config.configure_sql_data_node(
             id="foo",
@@ -131,8 +131,8 @@ class TestSQLDataNode:
         )
         dn_1 = _DataManagerFactory._build_manager()._create_and_set(sql_dn_config_1, None, None)
         assert isinstance(dn, SQLDataNode)
-        assert dn_1.exposed_type == MyCustomObject
-        assert dn_1.append_query_builder == my_append_query_builder_with_pandas
+        assert dn_1.properties["exposed_type"] == MyCustomObject
+        assert dn_1.properties["append_query_builder"] == my_append_query_builder_with_pandas
 
     @pytest.mark.parametrize("properties", __sql_properties)
     def test_get_user_properties(self, properties):

+ 3 - 3
tests/core/data/test_sql_table_data_node.py

@@ -97,8 +97,8 @@ class TestSQLTableDataNode:
         assert dn.owner_id is None
         assert dn.job_ids == []
         assert dn.is_ready_for_reading
-        assert dn.exposed_type == "pandas"
-        assert dn.table_name == "example"
+        assert dn.properties["exposed_type"] == "pandas"
+        assert dn.properties["table_name"] == "example"
         assert dn._get_base_read_query() == "SELECT * FROM example"
 
         sql_table_dn_config_1 = Config.configure_sql_table_data_node(
@@ -106,7 +106,7 @@ class TestSQLTableDataNode:
         )
         dn_1 = _DataManagerFactory._build_manager()._create_and_set(sql_table_dn_config_1, None, None)
         assert isinstance(dn_1, SQLTableDataNode)
-        assert dn_1.exposed_type == MyCustomObject
+        assert dn_1.properties["exposed_type"] == MyCustomObject
 
     @pytest.mark.parametrize("properties", __sql_properties)
     def test_get_user_properties(self, properties):

+ 9 - 21
tests/core/scenario/test_scenario.py

@@ -26,7 +26,6 @@ from taipy.core.data.in_memory import DataNode, InMemoryDataNode
 from taipy.core.data.pickle import PickleDataNode
 from taipy.core.exceptions.exceptions import (
     AttributeKeyAlreadyExisted,
-    PropertyKeyAlreadyExisted,
     SequenceAlreadyExists,
     SequenceTaskDoesNotExistInScenario,
 )
@@ -63,7 +62,7 @@ def test_create_primary_scenario(cycle):
     assert scenario.data_nodes == {}
     assert scenario.sequences == {}
     assert scenario.properties == {"key": "value"}
-    assert scenario.key == "value"
+    assert scenario.properties["key"] == "value"
     assert scenario.creation_date is not None
     assert scenario.is_primary
     assert scenario.cycle == cycle
@@ -163,28 +162,17 @@ def test_create_scenario_and_add_sequences():
     assert scenario.sequences == {"sequence_1": scenario.sequence_1, "sequence_2": scenario.sequence_2}
 
 
-def test_get_set_property_and_attribute():
+def test_get_set_attribute():
     dn_cfg = Config.configure_data_node("bar")
-    s_cfg = Config.configure_scenario("foo", additional_data_node_configs=[dn_cfg], key="value")
+    s_cfg = Config.configure_scenario("foo", additional_data_node_configs=[dn_cfg])
     scenario = create_scenario(s_cfg)
 
-    assert scenario.properties == {"key": "value"}
-    assert scenario.key == "value"
-
-    scenario.properties["new_key"] = "new_value"
-    scenario.another_key = "another_value"
-
+    scenario.key = "value"
     assert scenario.key == "value"
-    assert scenario.new_key == "new_value"
-    assert scenario.another_key == "another_value"
-    assert scenario.properties == {"key": "value", "new_key": "new_value"}
 
     with pytest.raises(AttributeKeyAlreadyExisted):
         scenario.bar = "KeyAlreadyUsed"
 
-    with pytest.raises(PropertyKeyAlreadyExisted):
-        scenario.properties["bar"] = "KeyAlreadyUsed"
-
 
 def test_create_scenario_overlapping_sequences():
     input_1 = PickleDataNode("input_1", Scope.SCENARIO)
@@ -483,11 +471,11 @@ def test_update_sequence(data_node):
 
     assert len(scenario.sequences) == 1
     assert scenario.sequences["seq_1"].tasks == {"foo": task_1}
-    assert scenario.sequences["seq_1"].name == "seq_1"
+    assert scenario.sequences["seq_1"].properties["name"] == "seq_1"
     scenario.update_sequence("seq_1", [task_2], {"new_key": "new_value"}, [])
     assert len(scenario.sequences) == 1
     assert scenario.sequences["seq_1"].tasks == {"bar": task_2}
-    assert scenario.sequences["seq_1"].name == "seq_1"
+    assert scenario.sequences["seq_1"].properties["name"] == "seq_1"
     assert scenario.sequences["seq_1"].properties["new_key"] == "new_value"
 
 
@@ -521,13 +509,13 @@ def test_add_rename_and_remove_sequences_within_context(data_node):
 def test_add_property_to_scenario():
     scenario = Scenario("foo", set(), {"key": "value"})
     assert scenario.properties == {"key": "value"}
-    assert scenario.key == "value"
+    assert scenario.properties["key"] == "value"
 
     scenario.properties["new_key"] = "new_value"
 
     assert scenario.properties == {"key": "value", "new_key": "new_value"}
-    assert scenario.key == "value"
-    assert scenario.new_key == "new_value"
+    assert scenario.properties["key"] == "value"
+    assert scenario.properties["new_key"] == "new_value"
 
 
 def test_add_cycle_to_scenario(cycle):

+ 9 - 21
tests/core/sequence/test_sequence.py

@@ -20,7 +20,7 @@ from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data.data_node import DataNode
 from taipy.core.data.in_memory import InMemoryDataNode
 from taipy.core.data.pickle import PickleDataNode
-from taipy.core.exceptions import AttributeKeyAlreadyExisted, PropertyKeyAlreadyExisted
+from taipy.core.exceptions import AttributeKeyAlreadyExisted
 from taipy.core.scenario._scenario_manager import _ScenarioManager
 from taipy.core.scenario.scenario import Scenario
 from taipy.core.sequence._sequence_manager import _SequenceManager
@@ -39,7 +39,7 @@ def test_sequence_equals():
     sequence_1 = scenario.sequences["print"]
     sequence_id = sequence_1.id
 
-    assert sequence_1.name == "print"
+    assert sequence_1.properties["name"] == "print"
     sequence_2 = _SequenceManager._get(sequence_id)
     # To test if instance is same type
     task = Task("task", {}, print, [], [], sequence_id)
@@ -57,7 +57,7 @@ def test_create_sequence():
     sequence = Sequence({"description": "description"}, [task], sequence_id=SequenceId("name_1"))
     assert sequence.id == "name_1"
     assert sequence.owner_id is None
-    assert sequence.description == "description"
+    assert sequence.properties["description"] == "description"
     assert sequence.foo == input
     assert sequence.bar == output
     assert sequence.baz.id == task.id
@@ -81,7 +81,7 @@ def test_create_sequence():
     )
     assert sequence_1.id == "name_1"
     assert sequence_1.owner_id == "owner_id"
-    assert sequence_1.description == "description"
+    assert sequence_1.properties["description"] == "description"
     assert sequence_1.input == input_1
     assert sequence_1.output == output_1
     assert sequence_1.task_1 == task_1
@@ -110,7 +110,7 @@ def test_create_sequence():
     )
     assert sequence_2.owner_id == "owner_id"
     assert sequence_2.id == "name_2"
-    assert sequence_2.description == "description"
+    assert sequence_2.properties["description"] == "description"
     assert sequence_2.tasks == {task.config_id: task, task_1.config_id: task_1}
     assert sequence_2.data_nodes == {"foo": input, "bar": output, "input": input_1, "output": output_1}
     assert sequence_2.parent_ids == {"parent_id_1", "parent_id_2"}
@@ -123,11 +123,11 @@ def test_create_sequence():
                 return self.label
 
         get_mck.return_value = MockOwner()
-        assert sequence_2.get_label() == "owner_label > " + sequence_2.name
-        assert sequence_2.get_simple_label() == sequence_2.name
+        assert sequence_2.get_label() == "owner_label > " + sequence_2.properties["name"]
+        assert sequence_2.get_simple_label() == sequence_2.properties["name"]
 
 
-def test_get_set_property_and_attribute():
+def test_get_set_attribute():
     dn_cfg = Config.configure_data_node("bar")
     task_config = Config.configure_task("print", print, [dn_cfg], None)
     scenario_config = Config.configure_scenario("scenario", [task_config])
@@ -135,25 +135,13 @@ def test_get_set_property_and_attribute():
     scenario = _ScenarioManager._create(scenario_config)
     scenario.add_sequences({"seq": list(scenario.tasks.values())})
     sequence = scenario.sequences["seq"]
-    sequence.properties["key"] = "value"
 
-    assert sequence.properties == {"name": "seq", "key": "value"}
+    sequence.key = "value"
     assert sequence.key == "value"
 
-    sequence.properties["new_key"] = "new_value"
-    sequence.another_key = "another_value"
-
-    assert sequence.key == "value"
-    assert sequence.new_key == "new_value"
-    assert sequence.another_key == "another_value"
-    assert sequence.properties == {"name": "seq", "key": "value", "new_key": "new_value"}
-
     with pytest.raises(AttributeKeyAlreadyExisted):
         sequence.bar = "KeyAlreadyUsed"
 
-    with pytest.raises(PropertyKeyAlreadyExisted):
-        sequence.properties["bar"] = "KeyAlreadyUsed"
-
 
 def test_check_consistency():
     sequence_1 = Sequence({}, [], "name_1")

+ 1 - 1
tests/core/sequence/test_sequence_manager.py

@@ -415,7 +415,7 @@ def test_get_or_create_data():
     scenario.add_sequences({"by_6": list(scenario.tasks.values())})
     sequence = scenario.sequences["by_6"]
 
-    assert sequence.name == "by_6"
+    assert sequence.properties["name"] == "by_6"
 
     assert len(_DataManager._get_all()) == 3
     assert len(_TaskManager._get_all()) == 2

+ 4 - 17
tests/core/task/test_task.py

@@ -21,7 +21,7 @@ from taipy.core.data._data_manager import _DataManager
 from taipy.core.data.csv import CSVDataNode
 from taipy.core.data.data_node import DataNode
 from taipy.core.data.in_memory import InMemoryDataNode
-from taipy.core.exceptions import AttributeKeyAlreadyExisted, PropertyKeyAlreadyExisted
+from taipy.core.exceptions import AttributeKeyAlreadyExisted
 from taipy.core.scenario._scenario_manager import _ScenarioManager
 from taipy.core.task._task_manager import _TaskManager
 from taipy.core.task._task_manager_factory import _TaskManagerFactory
@@ -96,7 +96,7 @@ def test_create_task():
     assert task.owner_id == "owner_id"
     assert task.parent_ids == {"parent_id_1", "parent_id_2"}
     assert task.name_1ea == abc_dn
-    assert task.name_1ea.path == path
+    assert task.name_1ea.properties["path"] == path
     with pytest.raises(AttributeError):
         _ = task.bar
     with mock.patch("taipy.core.get") as get_mck:
@@ -112,32 +112,19 @@ def test_create_task():
         assert task.get_simple_label() == task.config_id
 
 
-def test_get_set_property_and_attribute():
+def test_get_set_attribute():
     dn_cfg = Config.configure_data_node("bar")
     task_config = Config.configure_task("print", print, [dn_cfg], None)
     scenario_config = Config.configure_scenario("scenario", [task_config])
     scenario = _ScenarioManager._create(scenario_config)
     task = scenario.tasks["print"]
 
-    task.properties["key"] = "value"
-
-    assert task.properties == {"key": "value"}
+    task.key = "value"
     assert task.key == "value"
 
-    task.properties["new_key"] = "new_value"
-    task.another_key = "another_value"
-
-    assert task.key == "value"
-    assert task.new_key == "new_value"
-    assert task.another_key == "another_value"
-    assert task.properties == {"key": "value", "new_key": "new_value"}
-
     with pytest.raises(AttributeKeyAlreadyExisted):
         task.bar = "KeyAlreadyUsed"
 
-    with pytest.raises(PropertyKeyAlreadyExisted):
-        task.properties["bar"] = "KeyAlreadyUsed"
-
 
 def test_can_not_change_task_output(output):
     task = Task("name_1", {}, print, output=output)