Selaa lähdekoodia

still preparing for polar in enterprise (#1583)

Co-authored-by: Fred Lefévère-Laoide <Fred.Lefevere-Laoide@Taipy.io>
Fred Lefévère-Laoide 10 kuukautta sitten
vanhempi
commit
6c8c854258

+ 2 - 2
taipy/gui/_renderers/builder.py

@@ -531,7 +531,7 @@ class _Builder:
                     + "}"
                 )
                 self.__update_vars.append(f"comparedatas={','.join(cmp_datas_hash)}")
-        col_types = self.__gui._accessors._get_col_types(data_hash, _TaipyData(data, data_hash))
+        col_types = self.__gui._get_accessor().get_col_types(data_hash, _TaipyData(data, data_hash))
         col_dict = _get_columns_dict(
             data, self.__attributes.get("columns", {}), col_types, date_format, self.__attributes.get("number_format")
         )
@@ -591,7 +591,7 @@ class _Builder:
         # read column definitions
         data = self.__attributes.get("data")
         data_hash = self.__hashes.get("data", "")
-        col_types = self.__gui._accessors._get_col_types(data_hash, _TaipyData(data, data_hash))
+        col_types = self.__gui._get_accessor().get_col_types(data_hash, _TaipyData(data, data_hash))
 
         config = _build_chart_config(self.__gui, self.__attributes, col_types)
 

+ 18 - 11
taipy/gui/data/array_dict_data_accessor.py

@@ -13,7 +13,6 @@ import typing as t
 
 import pandas as pd
 
-from ..gui import Gui
 from ..utils import _MapDict
 from .data_format import _DataFormat
 from .pandas_data_accessor import _PandasDataAccessor
@@ -26,8 +25,8 @@ class _ArrayDictDataAccessor(_PandasDataAccessor):
     def get_supported_classes() -> t.List[str]:
         return [t.__name__ for t in _ArrayDictDataAccessor.__types]  # type: ignore
 
-    def _get_dataframe(self, value: t.Any) -> t.Union[t.List[pd.DataFrame], pd.DataFrame]:
-        if isinstance(value, list):
+    def to_pandas(self, value: t.Any) -> t.Union[t.List[pd.DataFrame], pd.DataFrame]:
+        if isinstance(value, (list, tuple)):
             if not value or isinstance(value[0], (str, int, float, bool)):
                 return pd.DataFrame({"0": value})
             types = {type(x) for x in value}
@@ -45,7 +44,7 @@ class _ArrayDictDataAccessor(_PandasDataAccessor):
                 elif type_elt is _MapDict:
                     return [pd.DataFrame(v._dict) for v in value]
                 elif type_elt is pd.DataFrame:
-                    return value
+                    return t.cast(t.List[pd.DataFrame], value)
 
             elif len(types) == 2 and list in types and pd.DataFrame in types:
                 return [v if isinstance(v, pd.DataFrame) else pd.DataFrame({f"{i}/0": v}) for i, v in enumerate(value)]
@@ -53,14 +52,22 @@ class _ArrayDictDataAccessor(_PandasDataAccessor):
             return pd.DataFrame(value._dict)
         return pd.DataFrame(value)
 
+    def _from_pandas(self, value: pd.DataFrame, type: t.Type):
+        if type is dict:
+            return value.to_dict()
+        if type is _MapDict:
+            return _MapDict(value.to_dict())
+        if len(value.columns) == 1:
+            if type is list:
+                return value.iloc[:, 0].to_list()
+            if type is tuple:
+                return tuple(value.iloc[:, 0].to_list())
+        return super()._from_pandas(value, type)
+
     def get_col_types(self, var_name: str, value: t.Any) -> t.Union[None, t.Dict[str, str]]:  # type: ignore
-        if isinstance(value, _ArrayDictDataAccessor.__types):  # type: ignore
-            return super().get_col_types(var_name, self._get_dataframe(value))
-        return None
+        return super().get_col_types(var_name, self.to_pandas(value))
 
     def get_data(  # noqa: C901
-        self, guiApp: Gui, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
+        self, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
     ) -> t.Dict[str, t.Any]:
-        if isinstance(value, _ArrayDictDataAccessor.__types):  # type: ignore
-            return super().get_data(guiApp, var_name, self._get_dataframe(value), payload, data_format)
-        return {}
+        return super().get_data(var_name, self.to_pandas(value), payload, data_format)

+ 2 - 2
taipy/gui/data/comparison.py

@@ -27,12 +27,12 @@ def _compare_function(
             return None
         compare_fn = gui._get_user_function(compare_name) if compare_name else None
         if callable(compare_fn):
-            return gui._accessors._get_dataframe(
+            return gui._get_accessor().to_pandas(
                 gui._call_function_with_state(compare_fn, [name, [gui._get_real_var_name(n) for n in names]])
             )
         elif compare_fn is not None:
             _warn(f"{compare_name}(): compare function name is not valid.")
-        dfs = [gui._accessors._get_dataframe(_getscopeattr(gui, n)) for n in names]
+        dfs = [gui._get_accessor().to_pandas(_getscopeattr(gui, n)) for n in names]
         return value.compare(dfs[0], keep_shape=True)
     except Exception as e:
         if not gui._call_on_exception(compare_name or "Gui._compare_function", e):

+ 39 - 33
taipy/gui/data/data_accessor.py

@@ -17,10 +17,16 @@ from .._warnings import _warn
 from ..utils import _TaipyData
 from .data_format import _DataFormat
 
+if t.TYPE_CHECKING:
+    from ..gui import Gui
+
 
 class _DataAccessor(ABC):
     _WS_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
 
+    def __init__(self, gui: "Gui") -> None:
+        self._gui = gui
+
     @staticmethod
     @abstractmethod
     def get_supported_classes() -> t.List[str]:
@@ -28,7 +34,7 @@ class _DataAccessor(ABC):
 
     @abstractmethod
     def get_data(
-        self, guiApp: t.Any, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
+        self, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
     ) -> t.Dict[str, t.Any]:
         pass
 
@@ -37,23 +43,23 @@ class _DataAccessor(ABC):
         pass
 
     @abstractmethod
-    def _get_dataframe(self, value: t.Any) -> t.Union[t.List[t.Any], t.Any]:
+    def to_pandas(self, value: t.Any) -> t.Union[t.List[t.Any], t.Any]:
         pass
 
     @abstractmethod
-    def _on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
+    def on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
         pass
 
     @abstractmethod
-    def _on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
+    def on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
         pass
 
     @abstractmethod
-    def _on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
+    def on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
         pass
 
     @abstractmethod
-    def _to_csv(self, guiApp: t.Any, var_name: str, value: t.Any):
+    def to_csv(self, var_name: str, value: t.Any):
         pass
 
 
@@ -63,36 +69,35 @@ class _InvalidDataAccessor(_DataAccessor):
         return [type(None).__name__]
 
     def get_data(
-        self, guiApp: t.Any, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
+        self, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
     ) -> t.Dict[str, t.Any]:
         return {}
 
     def get_col_types(self, var_name: str, value: t.Any) -> t.Dict[str, str]:
         return {}
 
-    def _get_dataframe(self, value: t.Any) -> t.Union[t.List[t.Any], t.Any]:
+    def to_pandas(self, value: t.Any) -> t.Union[t.List[t.Any], t.Any]:
         return None
 
-    def _on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
+    def on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
         return None
 
-    def _on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
+    def on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
         return None
 
-    def _on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
+    def on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
         return None
 
-    def _to_csv(self, guiApp: t.Any, var_name: str, value: t.Any):
+    def to_csv(self, var_name: str, value: t.Any):
         return None
 
 
 class _DataAccessors(object):
-    def __init__(self) -> None:
+    def __init__(self, gui: "Gui") -> None:
         self.__access_4_type: t.Dict[str, _DataAccessor] = {}
-
-        self.__invalid_data_accessor = _InvalidDataAccessor()
-
+        self.__invalid_data_accessor = _InvalidDataAccessor(gui)
         self.__data_format = _DataFormat.JSON
+        self.__gui = gui
 
         from .array_dict_data_accessor import _ArrayDictDataAccessor
         from .numpy_data_accessor import _NumpyDataAccessor
@@ -118,7 +123,7 @@ class _DataAccessors(object):
                 break
         if inst is None:
             try:
-                inst = cls()
+                inst = cls(self.__gui)
             except Exception as e:
                 raise TypeError(f"Class {cls.__name__} cannot be instantiated") from e
             if inst:
@@ -134,28 +139,29 @@ class _DataAccessors(object):
             return self.__invalid_data_accessor
         return access
 
-    def _get_data(
-        self, guiApp: t.Any, var_name: str, value: _TaipyData, payload: t.Dict[str, t.Any]
-    ) -> t.Dict[str, t.Any]:
-        return self.__get_instance(value).get_data(guiApp, var_name, value.get(), payload, self.__data_format)
+    def get_data(self, var_name: str, value: _TaipyData, payload: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
+        return self.__get_instance(value).get_data(var_name, value.get(), payload, self.__data_format)
 
-    def _get_col_types(self, var_name: str, value: _TaipyData) -> t.Dict[str, str]:
+    def get_col_types(self, var_name: str, value: _TaipyData) -> t.Dict[str, str]:
         return self.__get_instance(value).get_col_types(var_name, value.get())
 
-    def _set_data_format(self, data_format: _DataFormat):
+    def set_data_format(self, data_format: _DataFormat):
         self.__data_format = data_format
 
-    def _get_dataframe(self, value: t.Any):
-        return self.__get_instance(value)._get_dataframe(value)
+    def get_dataframe(self, value: t.Any):
+        return self.__get_instance(value).to_pandas(value)
+
+    def on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
+        return self.__get_instance(value).on_edit(value, payload)
 
-    def _on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
-        return self.__get_instance(value)._on_edit(value, payload)
+    def on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
+        return self.__get_instance(value).on_delete(value, payload)
 
-    def _on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
-        return self.__get_instance(value)._on_delete(value, payload)
+    def on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
+        return self.__get_instance(value).on_add(value, payload, new_row)
 
-    def _on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
-        return self.__get_instance(value)._on_add(value, payload, new_row)
+    def to_csv(self, var_name: str, value: t.Any):
+        return self.__get_instance(value).to_csv(var_name, value.get())
 
-    def _to_csv(self, guiApp: t.Any, var_name: str, value: t.Any):
-        return self.__get_instance(value)._to_csv(guiApp, var_name, value.get())
+    def to_pandas(self, value: t.Any):
+        return self.__get_instance(value).to_pandas(value.get())

+ 5 - 14
taipy/gui/data/numpy_data_accessor.py

@@ -14,8 +14,6 @@ import typing as t
 import numpy
 import pandas as pd
 
-from ..gui import Gui
-from .data_format import _DataFormat
 from .pandas_data_accessor import _PandasDataAccessor
 
 
@@ -26,17 +24,10 @@ class _NumpyDataAccessor(_PandasDataAccessor):
     def get_supported_classes() -> t.List[str]:
         return [t.__name__ for t in _NumpyDataAccessor.__types]  # type: ignore
 
-    def _get_dataframe(self, value: t.Any) -> pd.DataFrame:
+    def to_pandas(self, value: t.Any) -> pd.DataFrame:
         return pd.DataFrame(value)
 
-    def get_col_types(self, var_name: str, value: t.Any) -> t.Union[None, t.Dict[str, str]]:  # type: ignore
-        if isinstance(value, _NumpyDataAccessor.__types):  # type: ignore
-            return super().get_col_types(var_name, self._get_dataframe(value))
-        return None
-
-    def get_data(  # noqa: C901
-        self, guiApp: Gui, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
-    ) -> t.Dict[str, t.Any]:
-        if isinstance(value, _NumpyDataAccessor.__types):  # type: ignore
-            return super().get_data(guiApp, var_name, self._get_dataframe(value), payload, data_format)
-        return {}
+    def _from_pandas(self, value: pd.DataFrame, type: t.Type):
+        if type is numpy.ndarray:
+            return value.to_numpy()
+        return super()._from_pandas(value, type)

+ 88 - 75
taipy/gui/data/pandas_data_accessor.py

@@ -41,7 +41,17 @@ class _PandasDataAccessor(_DataAccessor):
 
     __AGGREGATE_FUNCTIONS: t.List[str] = ["count", "sum", "mean", "median", "min", "max", "std", "first", "last"]
 
-    def _get_dataframe(self, value: t.Any) -> t.Any:
+    def to_pandas(self, value: t.Union[pd.DataFrame, pd.Series]) -> t.Union[t.List[pd.DataFrame], pd.DataFrame]:
+        return self.__to_dataframe(value)
+
+    def __to_dataframe(self, value: t.Union[pd.DataFrame, pd.Series]) -> pd.DataFrame:
+        if isinstance(value, pd.Series):
+            return pd.DataFrame(value)
+        return t.cast(pd.DataFrame, value)
+
+    def _from_pandas(self, value: pd.DataFrame, data_type: t.Type):
+        if data_type is pd.Series:
+            return value.iloc[:, 0]
         return value
 
     @staticmethod
@@ -70,7 +80,6 @@ class _PandasDataAccessor(_DataAccessor):
 
     def __build_transferred_cols(
         self,
-        gui: Gui,
         payload_cols: t.Any,
         dataframe: pd.DataFrame,
         styles: t.Optional[t.Dict[str, str]] = None,
@@ -91,9 +100,9 @@ class _PandasDataAccessor(_DataAccessor):
                 is_copied = True
             for k, v in styles.items():
                 col_applied = False
-                func = gui._get_user_function(v)
+                func = self._gui._get_user_function(v)
                 if callable(func):
-                    col_applied = self.__apply_user_function(gui, func, k if k in cols else None, v, dataframe, "tps__")
+                    col_applied = self.__apply_user_function(func, k if k in cols else None, v, dataframe, "tps__")
                 if not col_applied:
                     dataframe[v] = v
                 cols.append(col_applied or v)
@@ -104,9 +113,9 @@ class _PandasDataAccessor(_DataAccessor):
                 is_copied = True
             for k, v in tooltips.items():
                 col_applied = False
-                func = gui._get_user_function(v)
+                func = self._gui._get_user_function(v)
                 if callable(func):
-                    col_applied = self.__apply_user_function(gui, func, k if k in cols else None, v, dataframe, "tpt__")
+                    col_applied = self.__apply_user_function(func, k if k in cols else None, v, dataframe, "tpt__")
                 cols.append(col_applied or v)
         # deal with dates
         datecols = col_types[col_types.astype(str).str.startswith("datetime")].index.tolist()  # type: ignore
@@ -146,7 +155,6 @@ class _PandasDataAccessor(_DataAccessor):
 
     def __apply_user_function(
         self,
-        gui: Gui,
         user_function: t.Callable,
         column_name: t.Optional[str],
         function_name: str,
@@ -158,7 +166,7 @@ class _PandasDataAccessor(_DataAccessor):
             data[new_col_name] = data.apply(
                 _PandasDataAccessor.__user_function,
                 axis=1,
-                args=(gui, column_name, user_function, function_name),
+                args=(self._gui, column_name, user_function, function_name),
             )
             return new_col_name
         except Exception as e:
@@ -210,22 +218,19 @@ class _PandasDataAccessor(_DataAccessor):
         return ret
 
     def get_col_types(self, var_name: str, value: t.Any) -> t.Union[None, t.Dict[str, str]]:  # type: ignore
-        if isinstance(value, pd.Series):
-            value = value.to_frame()
-        if isinstance(value, pd.DataFrame):  # type: ignore
-            return {str(k): v for k, v in value.dtypes.apply(lambda x: x.name.lower()).items()}
-        elif isinstance(value, list):
+        if isinstance(value, list):
             ret_dict: t.Dict[str, str] = {}
             for i, v in enumerate(value):
-                ret_dict.update({f"{i}/{k}": v for k, v in v.dtypes.apply(lambda x: x.name.lower()).items()})
+                ret_dict.update(
+                    {f"{i}/{k}": v for k, v in self.__to_dataframe(v).dtypes.apply(lambda x: x.name.lower()).items()}
+                )
             return ret_dict
-        return None
+        return {str(k): v for k, v in self.__to_dataframe(value).dtypes.apply(lambda x: x.name.lower()).items()}
 
     def __get_data(  # noqa: C901
         self,
-        gui: Gui,
         var_name: str,
-        value: t.Union[pd.DataFrame, pd.Series],
+        df: pd.DataFrame,
         payload: t.Dict[str, t.Any],
         data_format: _DataFormat,
         col_prefix: t.Optional[str] = "",
@@ -237,19 +242,17 @@ class _PandasDataAccessor(_DataAccessor):
         paged = not payload.get("alldata", False)
         is_copied = False
 
-        if isinstance(value, pd.Series):
-            value = value.to_frame()
-        orig_df = value
+        orig_df = df
         # add index if not chart
         if paged:
-            if _PandasDataAccessor.__INDEX_COL not in value.columns:
-                value = value.copy()
+            if _PandasDataAccessor.__INDEX_COL not in df.columns:
+                df = df.copy()
                 is_copied = True
-                value[_PandasDataAccessor.__INDEX_COL] = value.index
+                df[_PandasDataAccessor.__INDEX_COL] = df.index
             if columns and _PandasDataAccessor.__INDEX_COL not in columns:
                 columns.append(_PandasDataAccessor.__INDEX_COL)
 
-        fullrowcount = len(value)
+        fullrowcount = len(df)
         # filtering
         filters = payload.get("filters")
         if isinstance(filters, list) and len(filters) > 0:
@@ -260,7 +263,7 @@ class _PandasDataAccessor(_DataAccessor):
                 val = fd.get("value")
                 action = fd.get("action")
                 if isinstance(val, str):
-                    if self.__is_date_column(t.cast(pd.DataFrame, value), col):
+                    if self.__is_date_column(t.cast(pd.DataFrame, df), col):
                         val = datetime.fromisoformat(val[:-1])
                     vars.append(val)
                 val = f"@vars[{len(vars) - 1}]" if isinstance(val, (str, datetime)) else val
@@ -269,10 +272,10 @@ class _PandasDataAccessor(_DataAccessor):
                     query += " and "
                 query += f"`{col}`{right}"
             try:
-                value = value.query(query)
+                df = df.query(query)
                 is_copied = True
             except Exception as e:
-                _warn(f"Dataframe filtering: invalid query '{query}' on {value.head()}", e)
+                _warn(f"Dataframe filtering: invalid query '{query}' on {df.head()}", e)
 
         dictret: t.Optional[t.Dict[str, t.Any]]
         if paged:
@@ -280,7 +283,7 @@ class _PandasDataAccessor(_DataAccessor):
             applies = payload.get("applies")
             if isinstance(aggregates, list) and len(aggregates) and isinstance(applies, dict):
                 applies_with_fn = {
-                    k: v if v in _PandasDataAccessor.__AGGREGATE_FUNCTIONS else gui._get_user_function(v)
+                    k: v if v in _PandasDataAccessor.__AGGREGATE_FUNCTIONS else self._gui._get_user_function(v)
                     for k, v in applies.items()
                 }
 
@@ -288,14 +291,14 @@ class _PandasDataAccessor(_DataAccessor):
                     if col not in applies_with_fn.keys():
                         applies_with_fn[col] = "first"
                 try:
-                    value = t.cast(pd.DataFrame, value).groupby(aggregates).agg(applies_with_fn)
+                    df = t.cast(pd.DataFrame, df).groupby(aggregates).agg(applies_with_fn)
                 except Exception:
                     _warn(f"Cannot aggregate {var_name} with groupby {aggregates} and aggregates {applies}.")
             inf = payload.get("infinite")
             if inf is not None:
                 ret_payload["infinite"] = inf
             # real number of rows is needed to calculate the number of pages
-            rowcount = len(value)
+            rowcount = len(df)
             # here we'll deal with start and end values from payload if present
             if isinstance(payload["start"], int):
                 start = int(payload["start"])
@@ -328,9 +331,9 @@ class _PandasDataAccessor(_DataAccessor):
             order_by = payload.get("orderby")
             if isinstance(order_by, str) and len(order_by):
                 try:
-                    if value.columns.dtype.name == "int64":
+                    if df.columns.dtype.name == "int64":
                         order_by = int(order_by)
-                    new_indexes = t.cast(pd.DataFrame, value)[order_by].values.argsort(axis=0)
+                    new_indexes = t.cast(pd.DataFrame, df)[order_by].values.argsort(axis=0)
                     if payload.get("sort") == "desc":
                         # reverse order
                         new_indexes = new_indexes[::-1]
@@ -340,10 +343,9 @@ class _PandasDataAccessor(_DataAccessor):
                     new_indexes = slice(start, end + 1)  # type: ignore
             else:
                 new_indexes = slice(start, end + 1)  # type: ignore
-            value = self.__build_transferred_cols(
-                gui,
+            df = self.__build_transferred_cols(
                 columns,
-                t.cast(pd.DataFrame, value),
+                t.cast(pd.DataFrame, df),
                 styles=payload.get("styles"),
                 tooltips=payload.get("tooltips"),
                 is_copied=is_copied,
@@ -351,7 +353,7 @@ class _PandasDataAccessor(_DataAccessor):
                 handle_nan=payload.get("handlenan", False),
             )
             dictret = self.__format_data(
-                value,
+                df,
                 data_format,
                 "records",
                 start,
@@ -362,7 +364,7 @@ class _PandasDataAccessor(_DataAccessor):
             compare = payload.get("compare")
             if isinstance(compare, str):
                 comp_df = _compare_function(
-                    gui, compare, var_name, t.cast(pd.DataFrame, orig_df), payload.get("compare_datas", "")
+                    self._gui, compare, var_name, t.cast(pd.DataFrame, orig_df), payload.get("compare_datas", "")
                 )
                 if isinstance(comp_df, pd.DataFrame) and not comp_df.empty:
                     try:
@@ -371,7 +373,7 @@ class _PandasDataAccessor(_DataAccessor):
                             comp_df = t.cast(pd.DataFrame, comp_df.get(cols))
                             comp_df.columns = t.cast(pd.Index, [t.cast(tuple, c)[0] for c in cols])
                         comp_df.dropna(axis=1, how="all", inplace=True)
-                        comp_df = self.__build_transferred_cols(gui, columns, comp_df, new_indexes=new_indexes)
+                        comp_df = self.__build_transferred_cols(columns, comp_df, new_indexes=new_indexes)
                         dictret["comp"] = self.__format_data(comp_df, data_format, "records").get("data")
                     except Exception as e:
                         _warn("Pandas accessor compare raised an exception", e)
@@ -384,7 +386,9 @@ class _PandasDataAccessor(_DataAccessor):
             for decimator_pl in decimators:
                 decimator = decimator_pl.get("decimator")
                 decimator_instance = (
-                    gui._get_user_instance(decimator, PropertyType.decimator.value) if decimator is not None else None
+                    self._gui._get_user_instance(decimator, PropertyType.decimator.value)
+                    if decimator is not None
+                    else None
                 )
                 if isinstance(decimator_instance, PropertyType.decimator.value):
                     x_column, y_column, z_column = (
@@ -400,14 +404,14 @@ class _PandasDataAccessor(_DataAccessor):
                         y0 = relayoutData.get("yaxis.range[0]")
                         y1 = relayoutData.get("yaxis.range[1]")
 
-                        value, is_copied = _df_relayout(
-                            t.cast(pd.DataFrame, value), x_column, y_column, chart_mode, x0, x1, y0, y1, is_copied
+                        df, is_copied = _df_relayout(
+                            t.cast(pd.DataFrame, df), x_column, y_column, chart_mode, x0, x1, y0, y1, is_copied
                         )
 
-                    if nb_rows_max and decimator_instance._is_applicable(value, nb_rows_max, chart_mode):
+                    if nb_rows_max and decimator_instance._is_applicable(df, nb_rows_max, chart_mode):
                         try:
-                            value, is_copied = _df_data_filter(
-                                t.cast(pd.DataFrame, value),
+                            df, is_copied = _df_data_filter(
+                                t.cast(pd.DataFrame, df),
                                 x_column,
                                 y_column,
                                 z_column,
@@ -415,21 +419,21 @@ class _PandasDataAccessor(_DataAccessor):
                                 payload=decimator_payload,
                                 is_copied=is_copied,
                             )
-                            gui._call_on_change(f"{var_name}.{decimator}.nb_rows", len(value))
+                            self._gui._call_on_change(f"{var_name}.{decimator}.nb_rows", len(df))
                         except Exception as e:
                             _warn(f"Limit rows error with {decimator} for Dataframe", e)
-            value = self.__build_transferred_cols(gui, columns, t.cast(pd.DataFrame, value), is_copied=is_copied)
+            df = self.__build_transferred_cols(columns, t.cast(pd.DataFrame, df), is_copied=is_copied)
             if data_format is _DataFormat.CSV:
-                ret_payload["df"] = value
+                ret_payload["df"] = df
                 dictret = None
             else:
-                dictret = self.__format_data(value, data_format, "list", data_extraction=True)
+                dictret = self.__format_data(df, data_format, "list", data_extraction=True)
 
         ret_payload["value"] = dictret
         return ret_payload
 
     def get_data(
-        self, gui: Gui, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
+        self, var_name: str, value: t.Any, payload: t.Dict[str, t.Any], data_format: _DataFormat
     ) -> t.Dict[str, t.Any]:
         if isinstance(value, list):
             # If is_chart data
@@ -442,7 +446,7 @@ class _PandasDataAccessor(_DataAccessor):
                 data = []
                 for i, v in enumerate(value):
                     ret = (
-                        self.__get_data(gui, var_name, v, payload, data_format, f"{i}/")
+                        self.__get_data(var_name, self.__to_dataframe(v), payload, data_format, f"{i}/")
                         if isinstance(v, _PandasDataAccessor.__types)
                         else {}
                     )
@@ -453,18 +457,25 @@ class _PandasDataAccessor(_DataAccessor):
                 return ret_payload
             else:
                 value = value[0]
-        if isinstance(value, _PandasDataAccessor.__types):  # type: ignore
-            return self.__get_data(gui, var_name, value, payload, data_format)
-        return {}
+        return self.__get_data(var_name, self.__to_dataframe(value), payload, data_format)
 
-    def _on_edit(self, df: pd.DataFrame, payload: t.Dict[str, t.Any]):
+    def on_edit(self, value: t.Any, payload: t.Dict[str, t.Any]):
+        df = self.to_pandas(value)
+        if not isinstance(df, pd.DataFrame):
+            raise ValueError(f"Cannot edit {type(value)}.")
         df.at[payload["index"], payload["col"]] = payload["value"]
-        return df
-
-    def _on_delete(self, df: pd.DataFrame, payload: t.Dict[str, t.Any]):
-        return df.drop(payload["index"])
-
-    def _on_add(self, df: pd.DataFrame, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
+        return self._from_pandas(df, type(value))
+
+    def on_delete(self, value: t.Any, payload: t.Dict[str, t.Any]):
+        df = self.to_pandas(value)
+        if not isinstance(df, pd.DataFrame):
+            raise ValueError(f"Cannot delete a row from {type(value)}.")
+        return self._from_pandas(df.drop(payload["index"]), type(value))
+
+    def on_add(self, value: t.Any, payload: t.Dict[str, t.Any], new_row: t.Optional[t.List[t.Any]] = None):
+        df = self.to_pandas(value)
+        if not isinstance(df, pd.DataFrame):
+            raise ValueError(f"Cannot add a row to {type(value)}.")
         # Save the insertion index
         index = payload["index"]
         # Create the new row (Column value types must match the original DataFrame's)
@@ -477,23 +488,25 @@ class _PandasDataAccessor(_DataAccessor):
                 # Split the DataFrame
                 rows_before = df.loc[: index - 1]
                 rows_after = df.loc[index + 1 :]
-                return pd.concat([rows_before, new_df, rows_after], ignore_index=True)
+                return self._from_pandas(pd.concat([rows_before, new_df, rows_after], ignore_index=True), type(value))
             else:
                 # Insert as the new first row
                 df.loc[-1] = new_row  # Insert the new row
                 df.index = df.index + 1  # Shift index
-                return df.sort_index()
-        return df
-
-    def _to_csv(self, gui: Gui, var_name: str, value: pd.DataFrame):
-        if isinstance(value, _PandasDataAccessor.__types):  # type: ignore
-            dict_ret = self.__get_data(gui, var_name, value, {"alldata": True}, _DataFormat.CSV)
-            if isinstance(dict_ret, dict):
-                df = dict_ret.get("df")
-                if isinstance(df, pd.DataFrame):
-                    fd, temp_path = mkstemp(".csv", var_name, text=True)
-                    with os.fdopen(fd, "wt", newline="") as csv_file:
-                        df.to_csv(csv_file, index=False)
-
-                    return temp_path
+                return self._from_pandas(df.sort_index(), type(value))
+        return value
+
+    def to_csv(self, var_name: str, value: t.Any):
+        df = self.to_pandas(value)
+        if not isinstance(df, pd.DataFrame):
+            raise ValueError(f"Cannot export {type(value)} to csv.")
+        dict_ret = self.__get_data(var_name, df, {"alldata": True}, _DataFormat.CSV)
+        if isinstance(dict_ret, dict):
+            dfr = dict_ret.get("df")
+            if isinstance(dfr, pd.DataFrame):
+                fd, temp_path = mkstemp(".csv", var_name, text=True)
+                with os.fdopen(fd, "wt", newline="") as csv_file:
+                    dfr.to_csv(csv_file, index=False)
+
+                return temp_path
         return None

+ 15 - 14
taipy/gui/gui.py

@@ -62,7 +62,7 @@ from .builder import _ElementApiGenerator
 from .config import Config, ConfigParameter, _Config
 from .custom import Page as CustomPage
 from .data.content_accessor import _ContentAccessor
-from .data.data_accessor import _DataAccessor, _DataAccessors
+from .data.data_accessor import _DataAccessors
 from .data.data_format import _DataFormat
 from .data.data_scope import _DataScopes
 from .extension.library import Element, ElementLibrary
@@ -312,7 +312,7 @@ class Gui:
 
         self._config = _Config()
         self.__content_accessor = None
-        self._accessors = _DataAccessors()
+        self.__accessors: t.Optional[_DataAccessors] = None
         self.__state: t.Optional[State] = None
         self.__bindings = _Bindings(self)
         self.__locals_context = _LocalsContext()
@@ -1099,7 +1099,7 @@ class Gui:
                                 e,
                             )
             if not isinstance(ret_payload, dict):
-                ret_payload = self._accessors._get_data(self, var_name, newvalue, payload)
+                ret_payload = self._get_accessor().get_data(var_name, newvalue, payload)
             self.__send_ws_update_with_dict({var_name: ret_payload})
 
     def __request_var_update(self, payload: t.Any):
@@ -1408,8 +1408,7 @@ class Gui:
     def __download_csv(self, state: State, var_name: str, payload: dict):
         holder_name = t.cast(str, payload.get("var_name"))
         try:
-            csv_path = self._accessors._to_csv(
-                self,
+            csv_path = self._get_accessor().to_csv(
                 holder_name,
                 _getscopeattr(self, holder_name, None),
             )
@@ -1661,7 +1660,7 @@ class Gui:
         TODO: Default implementation of on_edit for tables
         """
         try:
-            setattr(state, var_name, self._accessors._on_edit(getattr(state, var_name), payload))
+            setattr(state, var_name, self._get_accessor().on_edit(getattr(state, var_name), payload))
         except Exception as e:
             _warn("TODO: Table.on_edit", e)
 
@@ -1670,7 +1669,7 @@ class Gui:
         TODO: Default implementation of on_delete for tables
         """
         try:
-            setattr(state, var_name, self._accessors._on_delete(getattr(state, var_name), payload))
+            setattr(state, var_name, self._get_accessor().on_delete(getattr(state, var_name), payload))
         except Exception as e:
             _warn("TODO: Table.on_delete", e)
 
@@ -1681,7 +1680,7 @@ class Gui:
         TODO: Default implementation of on_add for tables
         """
         try:
-            setattr(state, var_name, self._accessors._on_add(getattr(state, var_name), payload, new_row))
+            setattr(state, var_name, self._get_accessor().on_add(getattr(state, var_name), payload, new_row))
         except Exception as e:
             _warn("TODO: Table.on_add", e)
 
@@ -1698,7 +1697,7 @@ class Gui:
                     col_dict = _get_columns_dict(
                         data,
                         attributes.get("columns", {}),
-                        self._accessors._get_col_types(data_hash, _TaipyData(data, data_hash)),
+                        self._get_accessor().get_col_types(data_hash, _TaipyData(data, data_hash)),
                         attributes.get("date_format"),
                         attributes.get("number_format"),
                     )
@@ -1721,7 +1720,7 @@ class Gui:
                     config = _build_chart_config(
                         self,
                         attributes,
-                        self._accessors._get_col_types(data_hash, _TaipyData(kwargs.get(data_hash), data_hash)),
+                        self._get_accessor().get_col_types(data_hash, _TaipyData(kwargs.get(data_hash), data_hash)),
                     )
 
                     return json.dumps(config, cls=_TaipyJsonEncoder)
@@ -2348,9 +2347,6 @@ class Gui:
             }
         )
 
-    def _register_data_accessor(self, data_accessor_class: t.Type[_DataAccessor]) -> None:
-        self._accessors._register(data_accessor_class)
-
     def get_flask_app(self) -> Flask:
         """Get the internal Flask application.
 
@@ -2560,6 +2556,11 @@ class Gui:
         for bp in self._flask_blueprint:
             self._server.get_flask().register_blueprint(bp)
 
+    def _get_accessor(self):
+        if self.__accessors is None:
+            self.__accessors = _DataAccessors(self)
+        return self.__accessors
+
     def run(
         self,
         run_server: bool = True,
@@ -2699,7 +2700,7 @@ class Gui:
         self.__register_blueprint()
 
         # Register data accessor communication data format (JSON, Apache Arrow)
-        self._accessors._set_data_format(_DataFormat.APACHE_ARROW if app_config["use_arrow"] else _DataFormat.JSON)
+        self._get_accessor().set_data_format(_DataFormat.APACHE_ARROW if app_config["use_arrow"] else _DataFormat.JSON)
 
         # Use multi user or not
         self._bindings()._set_single_client(bool(app_config["single_client"]))

+ 21 - 23
tests/gui/data/test_array_dict_data_accessor.py

@@ -20,8 +20,8 @@ an_array = [1, 2, 3]
 
 
 def test_simple_data(gui: Gui, helpers):
-    accessor = _ArrayDictDataAccessor()
-    ret_data = accessor.get_data(gui, "x", an_array, {"start": 0, "end": -1}, _DataFormat.JSON)
+    accessor = _ArrayDictDataAccessor(gui)
+    ret_data = accessor.get_data("x", an_array, {"start": 0, "end": -1}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -32,8 +32,8 @@ def test_simple_data(gui: Gui, helpers):
 
 def test_simple_data_with_arrow(gui: Gui, helpers):
     if util.find_spec("pyarrow"):
-        accessor = _ArrayDictDataAccessor()
-        ret_data = accessor.get_data(gui, "x", an_array, {"start": 0, "end": -1}, _DataFormat.APACHE_ARROW)
+        accessor = _ArrayDictDataAccessor(gui)
+        ret_data = accessor.get_data("x", an_array, {"start": 0, "end": -1}, _DataFormat.APACHE_ARROW)
         assert ret_data
         value = ret_data["value"]
         assert value
@@ -43,29 +43,29 @@ def test_simple_data_with_arrow(gui: Gui, helpers):
 
 
 def test_slice(gui: Gui, helpers):
-    accessor = _ArrayDictDataAccessor()
-    value = accessor.get_data(gui, "x", an_array, {"start": 0, "end": 1}, _DataFormat.JSON)["value"]
+    accessor = _ArrayDictDataAccessor(gui)
+    value = accessor.get_data("x", an_array, {"start": 0, "end": 1}, _DataFormat.JSON)["value"]
     assert value["rowcount"] == 3
     data = value["data"]
     assert len(data) == 2
-    value = accessor.get_data(gui, "x", an_array, {"start": "0", "end": "1"}, _DataFormat.JSON)["value"]
+    value = accessor.get_data("x", an_array, {"start": "0", "end": "1"}, _DataFormat.JSON)["value"]
     data = value["data"]
     assert len(data) == 2
 
 
 def test_sort(gui: Gui, helpers):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     a_dict = {"name": ["A", "B", "C"], "value": [3, 2, 1]}
     query = {"columns": ["name", "value"], "start": 0, "end": -1, "orderby": "name", "sort": "desc"}
-    data = accessor.get_data(gui, "x", a_dict, query, _DataFormat.JSON)["value"]["data"]
+    data = accessor.get_data("x", a_dict, query, _DataFormat.JSON)["value"]["data"]
     assert data[0]["name"] == "C"
 
 
 def test_aggregate(gui: Gui, helpers, small_dataframe):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     a_dict = {"name": ["A", "B", "C", "A"], "value": [3, 2, 1, 2]}
     query = {"columns": ["name", "value"], "start": 0, "end": -1, "aggregates": ["name"], "applies": {"value": "sum"}}
-    value = accessor.get_data(gui, "x", a_dict, query, _DataFormat.JSON)["value"]
+    value = accessor.get_data("x", a_dict, query, _DataFormat.JSON)["value"]
     assert value["rowcount"] == 3
     data = value["data"]
     agregValue = next(v.get("value") for v in data if v.get("name") == "A")
@@ -73,9 +73,9 @@ def test_aggregate(gui: Gui, helpers, small_dataframe):
 
 
 def test_array_of_array(gui: Gui, helpers, small_dataframe):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     an_array = [[1, 2, 3], [2, 4, 6]]
-    ret_data = accessor.get_data(gui, "x", an_array, {"start": 0, "end": -1}, _DataFormat.JSON)
+    ret_data = accessor.get_data("x", an_array, {"start": 0, "end": -1}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -86,9 +86,9 @@ def test_array_of_array(gui: Gui, helpers, small_dataframe):
 
 
 def test_empty_array(gui: Gui, helpers, small_dataframe):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     an_array: list[str] = []
-    ret_data = accessor.get_data(gui, "x", an_array, {"start": 0, "end": -1}, _DataFormat.JSON)
+    ret_data = accessor.get_data("x", an_array, {"start": 0, "end": -1}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -98,9 +98,9 @@ def test_empty_array(gui: Gui, helpers, small_dataframe):
 
 
 def test_array_of_diff_array(gui: Gui, helpers, small_dataframe):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     an_array = [[1, 2, 3], [2, 4]]
-    ret_data = accessor.get_data(gui, "x", an_array, {"start": 0, "end": -1, "alldata": True}, _DataFormat.JSON)
+    ret_data = accessor.get_data("x", an_array, {"start": 0, "end": -1, "alldata": True}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -112,7 +112,7 @@ def test_array_of_diff_array(gui: Gui, helpers, small_dataframe):
 
 
 def test_array_of_dicts(gui: Gui, helpers, small_dataframe):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     an_array_of_dicts = [
         {
             "temperatures": [
@@ -126,9 +126,7 @@ def test_array_of_dicts(gui: Gui, helpers, small_dataframe):
         },
         {"seasons": ["Winter", "Summer", "Spring", "Autumn"]},
     ]
-    ret_data = accessor.get_data(
-        gui, "x", an_array_of_dicts, {"start": 0, "end": -1, "alldata": True}, _DataFormat.JSON
-    )
+    ret_data = accessor.get_data("x", an_array_of_dicts, {"start": 0, "end": -1, "alldata": True}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -140,7 +138,7 @@ def test_array_of_dicts(gui: Gui, helpers, small_dataframe):
 
 
 def test_array_of_Mapdicts(gui: Gui, helpers, small_dataframe):
-    accessor = _ArrayDictDataAccessor()
+    accessor = _ArrayDictDataAccessor(gui)
     dict1 = _MapDict(
         {
             "temperatures": [
@@ -154,7 +152,7 @@ def test_array_of_Mapdicts(gui: Gui, helpers, small_dataframe):
         }
     )
     dict2 = _MapDict({"seasons": ["Winter", "Summer", "Spring", "Autumn"]})
-    ret_data = accessor.get_data(gui, "x", [dict1, dict2], {"start": 0, "end": -1, "alldata": True}, _DataFormat.JSON)
+    ret_data = accessor.get_data("x", [dict1, dict2], {"start": 0, "end": -1, "alldata": True}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value

+ 27 - 28
tests/gui/data/test_pandas_data_accessor.py

@@ -13,7 +13,7 @@ import inspect
 from datetime import datetime
 from importlib import util
 
-import pandas  # type: ignore
+import pandas
 from flask import g
 
 from taipy.gui import Gui
@@ -23,9 +23,9 @@ from taipy.gui.data.pandas_data_accessor import _PandasDataAccessor
 
 
 def test_simple_data(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
-    ret_data = accessor.get_data(gui, "x", pd, {"start": 0, "end": -1}, _DataFormat.JSON)
+    ret_data = accessor.get_data("x", pd, {"start": 0, "end": -1}, _DataFormat.JSON)
     assert ret_data
     value = ret_data["value"]
     assert value
@@ -36,9 +36,9 @@ def test_simple_data(gui: Gui, helpers, small_dataframe):
 
 def test_simple_data_with_arrow(gui: Gui, helpers, small_dataframe):
     if util.find_spec("pyarrow"):
-        accessor = _PandasDataAccessor()
+        accessor = _PandasDataAccessor(gui)
         pd = pandas.DataFrame(data=small_dataframe)
-        ret_data = accessor.get_data(gui, "x", pd, {"start": 0, "end": -1}, _DataFormat.APACHE_ARROW)
+        ret_data = accessor.get_data("x", pd, {"start": 0, "end": -1}, _DataFormat.APACHE_ARROW)
         assert ret_data
         value = ret_data["value"]
         assert value
@@ -48,9 +48,9 @@ def test_simple_data_with_arrow(gui: Gui, helpers, small_dataframe):
 
 
 def test_get_all_simple_data(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
-    ret_data = accessor.get_data(gui, "x", pd, {"alldata": True}, _DataFormat.JSON)
+    ret_data = accessor.get_data("x", pd, {"alldata": True}, _DataFormat.JSON)
     assert ret_data
     assert ret_data["alldata"] is True
     value = ret_data["value"]
@@ -60,40 +60,40 @@ def test_get_all_simple_data(gui: Gui, helpers, small_dataframe):
 
 
 def test_slice(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
-    value = accessor.get_data(gui, "x", pd, {"start": 0, "end": 1}, _DataFormat.JSON)["value"]
+    value = accessor.get_data("x", pd, {"start": 0, "end": 1}, _DataFormat.JSON)["value"]
     assert value["rowcount"] == 3
     data = value["data"]
     assert len(data) == 2
-    value = accessor.get_data(gui, "x", pd, {"start": "0", "end": "1"}, _DataFormat.JSON)["value"]
+    value = accessor.get_data("x", pd, {"start": "0", "end": "1"}, _DataFormat.JSON)["value"]
     data = value["data"]
     assert len(data) == 2
 
 
 def test_sort(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
     query = {"columns": ["name", "value"], "start": 0, "end": -1, "orderby": "name", "sort": "desc"}
-    data = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)["value"]["data"]
+    data = accessor.get_data("x", pd, query, _DataFormat.JSON)["value"]["data"]
     assert data[0]["name"] == "C"
 
 
 def test_aggregate(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
     pd = pandas.concat(
         [pd, pandas.DataFrame(data={"name": ["A"], "value": [4]})], axis=0, join="outer", ignore_index=True
     )
     query = {"columns": ["name", "value"], "start": 0, "end": -1, "aggregates": ["name"], "applies": {"value": "sum"}}
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)["value"]
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)["value"]
     assert value["rowcount"] == 3
     data = value["data"]
     assert next(v.get("value") for v in data if v.get("name") == "A") == 5
 
 
 def test_filters(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
     pd = pandas.concat(
         [pd, pandas.DataFrame(data={"name": ["A"], "value": [4]})], axis=0, join="outer", ignore_index=True
@@ -104,7 +104,7 @@ def test_filters(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "name", "action": "!=", "value": ""}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 4
 
     query = {
@@ -113,7 +113,7 @@ def test_filters(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "name", "action": "==", "value": ""}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 0
 
     query = {
@@ -122,7 +122,7 @@ def test_filters(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "name", "action": "==", "value": "A"}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 2
 
     query = {
@@ -131,7 +131,7 @@ def test_filters(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "name", "action": "==", "value": "A"}, {"col": "value", "action": "==", "value": 2}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 0
 
     query = {
@@ -140,13 +140,13 @@ def test_filters(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "name", "action": "!=", "value": "A"}, {"col": "value", "action": "==", "value": 2}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 1
     assert value["value"]["data"][0]["_tp_index"] == 1
 
 
 def test_filter_by_date(gui: Gui, helpers, small_dataframe):
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
     pd["a date"] = [
         datetime.fromisocalendar(2022, 28, 1),
@@ -159,7 +159,7 @@ def test_filter_by_date(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "a date", "action": ">", "value": datetime.fromisocalendar(2022, 28, 3).isoformat() + "Z"}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 0
     query = {
         "columns": ["name", "value"],
@@ -167,7 +167,7 @@ def test_filter_by_date(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "a date", "action": ">", "value": datetime.fromisocalendar(2022, 28, 2).isoformat() + "Z"}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 1
     query = {
         "columns": ["name", "value"],
@@ -175,7 +175,7 @@ def test_filter_by_date(gui: Gui, helpers, small_dataframe):
         "end": -1,
         "filters": [{"col": "a date", "action": "<", "value": datetime.fromisocalendar(2022, 28, 3).isoformat() + "Z"}],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 2
     query = {
         "columns": ["name", "value"],
@@ -186,7 +186,7 @@ def test_filter_by_date(gui: Gui, helpers, small_dataframe):
             {"col": "a date", "action": ">", "value": datetime.fromisocalendar(2022, 28, 2).isoformat() + "Z"},
         ],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 0
     query = {
         "columns": ["name", "value"],
@@ -197,14 +197,14 @@ def test_filter_by_date(gui: Gui, helpers, small_dataframe):
             {"col": "a date", "action": ">", "value": datetime.fromisocalendar(2022, 28, 1).isoformat() + "Z"},
         ],
     }
-    value = accessor.get_data(gui, "x", pd, query, _DataFormat.JSON)
+    value = accessor.get_data("x", pd, query, _DataFormat.JSON)
     assert len(value["value"]["data"]) == 1
 
 
 def test_decimator(gui: Gui, helpers, small_dataframe):
     a_decimator = ScatterDecimator()  # noqa: F841
 
-    accessor = _PandasDataAccessor()
+    accessor = _PandasDataAccessor(gui)
     pd = pandas.DataFrame(data=small_dataframe)
 
     # set gui frame
@@ -221,7 +221,6 @@ def test_decimator(gui: Gui, helpers, small_dataframe):
         g.client_id = cid
 
         ret_data = accessor.get_data(
-            gui,
             "x",
             pd,
             {