# conftest.py
  1. # Copyright 2021-2025 Avaiga Private Limited
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
  4. # the License. You may obtain a copy of the License at
  5. #
  6. # http://www.apache.org/licenses/LICENSE-2.0
  7. #
  8. # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
  9. # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
  10. # specific language governing permissions and limitations under the License.
  11. import os
  12. import pickle
  13. import shutil
  14. from datetime import datetime
  15. from queue import Queue
  16. from unittest.mock import patch
  17. import pandas as pd
  18. import pytest
  19. from sqlalchemy import create_engine, text
  20. from sqlalchemy.orm import close_all_sessions
  21. from taipy.common.config import Config
  22. from taipy.common.config.checker._checker import _Checker
  23. from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
  24. from taipy.core._version._version import _Version
  25. from taipy.core._version._version_manager_factory import _VersionManagerFactory
  26. from taipy.core.common.frequency import Frequency
  27. from taipy.core.common.scope import Scope
  28. from taipy.core.config import (
  29. _ConfigIdChecker,
  30. _CoreSectionChecker,
  31. _DataNodeConfigChecker,
  32. _JobConfigChecker,
  33. _ScenarioConfigChecker,
  34. _TaskConfigChecker,
  35. )
  36. from taipy.core.cycle._cycle_manager_factory import _CycleManagerFactory
  37. from taipy.core.cycle._cycle_model import _CycleModel
  38. from taipy.core.cycle.cycle import Cycle
  39. from taipy.core.cycle.cycle_id import CycleId
  40. from taipy.core.data._data_manager_factory import _DataManagerFactory
  41. from taipy.core.data._data_model import _DataNodeModel
  42. from taipy.core.data.in_memory import DataNodeId, InMemoryDataNode
  43. from taipy.core.job._job_manager_factory import _JobManagerFactory
  44. from taipy.core.job.job import Job
  45. from taipy.core.job.job_id import JobId
  46. from taipy.core.notification.notifier import Notifier
  47. from taipy.core.orchestrator import Orchestrator
  48. from taipy.core.scenario._scenario_manager_factory import _ScenarioManagerFactory
  49. from taipy.core.scenario._scenario_model import _ScenarioModel
  50. from taipy.core.scenario.scenario import Scenario
  51. from taipy.core.scenario.scenario_id import ScenarioId
  52. from taipy.core.sequence._sequence_manager_factory import _SequenceManagerFactory
  53. from taipy.core.sequence.sequence import Sequence
  54. from taipy.core.sequence.sequence_id import SequenceId
  55. from taipy.core.submission._submission_manager_factory import _SubmissionManagerFactory
  56. from taipy.core.submission.submission import Submission
  57. from taipy.core.task._task_manager_factory import _TaskManagerFactory
  58. from taipy.core.task.task import Task, TaskId
# Single timestamp captured at import time; shared by the scenario fixtures and
# the ``current_datetime`` fixture so related entities agree on creation time.
current_time = datetime.now()
  60. @pytest.fixture(scope="function")
  61. def csv_file(tmpdir_factory) -> str:
  62. csv = pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  63. fn = tmpdir_factory.mktemp("data").join("df.csv")
  64. csv.to_csv(str(fn), index=False)
  65. return fn.strpath
  66. @pytest.fixture(scope="function")
  67. def excel_file(tmpdir_factory) -> str:
  68. excel = pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  69. fn = tmpdir_factory.mktemp("data").join("df.xlsx")
  70. excel.to_excel(str(fn), index=False)
  71. return fn.strpath
  72. @pytest.fixture(scope="function")
  73. def excel_file_with_sheet_name(tmpdir_factory) -> str:
  74. excel = pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  75. fn = tmpdir_factory.mktemp("data").join("df.xlsx")
  76. excel.to_excel(str(fn), sheet_name="sheet_name", index=False)
  77. return fn.strpath
  78. @pytest.fixture(scope="function")
  79. def json_file(tmpdir_factory) -> str:
  80. json_data = pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  81. fn = tmpdir_factory.mktemp("data").join("df.json")
  82. json_data.to_json(str(fn), orient="records")
  83. return fn.strpath
  84. @pytest.fixture(scope="function")
  85. def excel_file_with_multi_sheet(tmpdir_factory) -> str:
  86. excel_multi_sheet = {
  87. "Sheet1": pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}]),
  88. "Sheet2": pd.DataFrame([{"a": 7, "b": 8, "c": 9}, {"a": 10, "b": 11, "c": 12}]),
  89. }
  90. fn = tmpdir_factory.mktemp("data").join("df.xlsx")
  91. with pd.ExcelWriter(str(fn)) as writer:
  92. for key in excel_multi_sheet.keys():
  93. excel_multi_sheet[key].to_excel(writer, key, index=False)
  94. return fn.strpath
  95. @pytest.fixture(scope="function")
  96. def pickle_file_path(tmpdir_factory) -> str:
  97. data = pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  98. fn = tmpdir_factory.mktemp("data").join("df.p")
  99. with open(str(fn), "wb") as f:
  100. pickle.dump(data, f)
  101. return fn.strpath
  102. @pytest.fixture(scope="function")
  103. def parquet_file_path(tmpdir_factory) -> str:
  104. data = pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  105. fn = tmpdir_factory.mktemp("data").join("df.parquet")
  106. data.to_parquet(str(fn))
  107. return fn.strpath
  108. @pytest.fixture(scope="function")
  109. def tmp_sqlite_db_file_path(tmpdir_factory):
  110. fn = tmpdir_factory.mktemp("data")
  111. db_name = "df"
  112. file_extension = ".db"
  113. db = create_engine("sqlite:///" + os.path.join(fn.strpath, f"{db_name}{file_extension}"))
  114. conn = db.connect()
  115. conn.execute(text("CREATE TABLE example (foo int, bar int);"))
  116. conn.execute(text("INSERT INTO example (foo, bar) VALUES (1, 2);"))
  117. conn.execute(text("INSERT INTO example (foo, bar) VALUES (3, 4);"))
  118. conn.commit()
  119. conn.close()
  120. db.dispose()
  121. return fn.strpath, db_name, file_extension
  122. @pytest.fixture(scope="function")
  123. def tmp_sqlite_sqlite3_file_path(tmpdir_factory):
  124. fn = tmpdir_factory.mktemp("data")
  125. db_name = "df"
  126. file_extension = ".sqlite3"
  127. db = create_engine("sqlite:///" + os.path.join(fn.strpath, f"{db_name}{file_extension}"))
  128. conn = db.connect()
  129. conn.execute(text("CREATE TABLE example (foo int, bar int);"))
  130. conn.execute(text("INSERT INTO example (foo, bar) VALUES (1, 2);"))
  131. conn.execute(text("INSERT INTO example (foo, bar) VALUES (3, 4);"))
  132. conn.commit()
  133. conn.close()
  134. db.dispose()
  135. return fn.strpath, db_name, file_extension
  136. @pytest.fixture(scope="function")
  137. def default_data_frame():
  138. return pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  139. @pytest.fixture(scope="function")
  140. def default_multi_sheet_data_frame():
  141. return {
  142. "Sheet1": pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}]),
  143. "Sheet2": pd.DataFrame([{"a": 7, "b": 8, "c": 9}, {"a": 10, "b": 11, "c": 12}]),
  144. }
@pytest.fixture(scope="function")
def current_datetime():
    # Return the module-level timestamp captured at import time, so every test
    # sees the same datetime value as the scenario fixtures.
    return current_time
@pytest.fixture(scope="function")
def scenario(cycle):
    # Minimal scenario entity with a fixed id, a single tag, and a pinned version.
    # NOTE(review): the ``cycle`` fixture is requested (so it gets built) but the
    # scenario is created with ``cycle=None`` — confirm the dependency is intentional.
    return Scenario(
        "sc",
        set(),
        {},
        set(),
        ScenarioId("SCENARIO_scenario_id"),
        current_time,
        is_primary=False,
        tags={"foo"},
        version="random_version_number",
        cycle=None,
    )
  162. @pytest.fixture(scope="function")
  163. def data_node():
  164. return InMemoryDataNode(
  165. "data_node_config_id", Scope.SCENARIO, version="random_version_number", id=DataNodeId("DATANODE_data_node_id")
  166. )
@pytest.fixture(scope="function")
def data_node_model():
    # Raw persistence model for a CSV data node; arguments are positional, so they
    # must follow _DataNodeModel's declared field order.
    return _DataNodeModel(
        "my_dn_id",
        "test_data_node",
        Scope.SCENARIO,
        "csv",
        "name",
        "owner_id",
        # NOTE(review): building the list from a set makes the parent-id order
        # run-dependent (hash randomization); use a list literal if order matters.
        list({"parent_id_1", "parent_id_2"}),
        datetime(1985, 10, 14, 2, 30, 0).isoformat(),
        [{"timestamp": datetime(1985, 10, 14, 2, 30, 0).isoformat(), "job_id": "job_id"}],
        "latest",
        None,
        None,
        False,
        {"path": "/path", "has_header": True, "prop": "ENV[FOO]", "exposed_type": "pandas"},
    )
  185. @pytest.fixture(scope="function")
  186. def task(data_node):
  187. _DataManagerFactory._build_manager()._repository._save(data_node)
  188. dn = InMemoryDataNode("dn_config_id", Scope.SCENARIO, version="random_version_number")
  189. _DataManagerFactory._build_manager()._repository._save(dn)
  190. return Task("task_config_id", {}, print, [data_node], [dn], TaskId("TASK_task_id"))
@pytest.fixture(scope="function")
def scenario_model(cycle):
    # Raw persistence model for a scenario, sharing the module-level ``current_time``.
    # NOTE(review): the ``cycle`` fixture is requested but the model is created with
    # ``cycle=None`` — confirm the dependency is intentional.
    return _ScenarioModel(
        ScenarioId("sc_id"),
        "sc",
        set(),
        set(),
        {},
        creation_date=current_time.isoformat(),
        primary_scenario=False,
        subscribers=[],
        tags=["foo"],
        version="random_version_number",
        cycle=None,
    )
  206. @pytest.fixture(scope="function")
  207. def cycle():
  208. example_date = datetime.fromisoformat("2021-11-11T11:11:01.000001")
  209. return Cycle(
  210. Frequency.DAILY,
  211. {},
  212. creation_date=example_date,
  213. start_date=example_date,
  214. end_date=example_date,
  215. name="cc",
  216. id=CycleId("CYCLE_cycle_id"),
  217. )
@pytest.fixture(scope="class")
def sequence():
    # Minimal sequence entity with fixed ids and a pinned version.
    # NOTE(review): unlike most fixtures here this one is class-scoped — the same
    # Sequence instance is shared by all tests in a class; confirm that is intended.
    return Sequence(
        {},
        [],
        SequenceId("sequence_id"),
        owner_id="owner_id",
        parent_ids={"parent_id_1", "parent_id_2"},
        version="random_version_number",
    )
@pytest.fixture(scope="function")
def job(task):
    # Job bound to the ``task`` fixture. "foo" and "bar" are positional placeholder
    # arguments — presumably submit id and submit entity id; verify against Job.__init__.
    return Job(JobId("job"), task, "foo", "bar", version="random_version_number")
@pytest.fixture(scope="function")
def submission(task):
    # Submission referencing the ``task`` fixture's id, id prefix, and config id.
    return Submission(task.id, task._ID_PREFIX, task.config_id, properties={})
@pytest.fixture(scope="function")
def _version():
    # Version entity snapshotting the currently applied configuration.
    return _Version(id="foo", config=Config._applied_config)
@pytest.fixture(scope="function")
def cycle_model():
    # Raw persistence model for a daily cycle; the three dates are pre-serialized
    # ISO strings, matching what the repository stores.
    return _CycleModel(
        CycleId("cc_id"),
        "cc",
        Frequency.DAILY,
        {},
        creation_date="2021-11-11T11:11:01.000001",
        start_date="2021-11-11T11:11:01.000001",
        end_date="2021-11-11T11:11:01.000001",
    )
  248. @pytest.fixture(scope="function")
  249. def tmp_sqlite(tmpdir_factory):
  250. fn = tmpdir_factory.mktemp("db")
  251. return os.path.join(fn.strpath, "test.db")
  252. @pytest.fixture(scope="session", autouse=True)
  253. def cleanup_files():
  254. for path in [".data", ".my_data", "user_data", ".taipy"]:
  255. if os.path.exists(path):
  256. shutil.rmtree(path, ignore_errors=True)
  257. yield
  258. for path in [".data", ".my_data", "user_data", ".taipy"]:
  259. if os.path.exists(path):
  260. shutil.rmtree(path, ignore_errors=True)
  261. @pytest.fixture(scope="function", autouse=True)
  262. def clean_core(init_config, init_managers, init_orchestrator, init_notifier, clean_argparser):
  263. clean_argparser()
  264. close_all_sessions()
  265. init_config()
  266. init_orchestrator()
  267. init_managers()
  268. init_config()
  269. init_notifier()
  270. with patch("sys.argv", ["prog"]):
  271. yield
  272. clean_argparser()
  273. close_all_sessions()
  274. init_orchestrator()
  275. init_managers()
  276. init_config()
  277. init_notifier()
  278. @pytest.fixture
  279. def init_config(reset_configuration_singleton, inject_core_sections):
  280. def _init_config():
  281. reset_configuration_singleton()
  282. inject_core_sections()
  283. _Checker.add_checker(_ConfigIdChecker)
  284. _Checker.add_checker(_CoreSectionChecker)
  285. _Checker.add_checker(_DataNodeConfigChecker)
  286. _Checker.add_checker(_JobConfigChecker)
  287. _Checker.add_checker(_TaskConfigChecker)
  288. _Checker.add_checker(_ScenarioConfigChecker)
  289. Config.configure_core(read_entity_retry=0)
  290. Orchestrator._is_running = False
  291. Orchestrator._version_is_initialized = False
  292. return _init_config
  293. @pytest.fixture
  294. def init_managers():
  295. def _init_managers():
  296. _CycleManagerFactory._build_manager()._delete_all()
  297. _ScenarioManagerFactory._build_manager()._delete_all()
  298. _SequenceManagerFactory._build_manager()._delete_all()
  299. _JobManagerFactory._build_manager()._delete_all()
  300. _TaskManagerFactory._build_manager()._delete_all()
  301. _DataManagerFactory._build_manager()._delete_all()
  302. _VersionManagerFactory._build_manager()._delete_all()
  303. _SubmissionManagerFactory._build_manager()._delete_all()
  304. return _init_managers
  305. @pytest.fixture
  306. def init_orchestrator():
  307. def _init_orchestrator():
  308. _OrchestratorFactory._remove_dispatcher()
  309. if _OrchestratorFactory._orchestrator is None:
  310. _OrchestratorFactory._build_orchestrator()
  311. _OrchestratorFactory._build_dispatcher(force_restart=True)
  312. _OrchestratorFactory._orchestrator.jobs_to_run = Queue()
  313. _OrchestratorFactory._orchestrator.blocked_jobs = []
  314. return _init_orchestrator
  315. @pytest.fixture
  316. def init_notifier():
  317. def _init_notifier():
  318. Notifier._topics_registrations_list = {}
  319. return _init_notifier
  320. @pytest.fixture
  321. def sql_engine():
  322. return create_engine("sqlite:///:memory:")