conftest.py 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410
  1. # Copyright 2021-2024 Avaiga Private Limited
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
  4. # the License. You may obtain a copy of the License at
  5. #
  6. # http://www.apache.org/licenses/LICENSE-2.0
  7. #
  8. # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
  9. # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
  10. # specific language governing permissions and limitations under the License.
  11. import os
  12. import pickle
  13. import shutil
  14. from datetime import datetime
  15. from queue import Queue
  16. from unittest.mock import patch
  17. import pandas as pd
  18. import pytest
  19. from sqlalchemy import create_engine, text
  20. from sqlalchemy.orm import close_all_sessions
  21. from taipy.config.checker._checker import _Checker
  22. from taipy.config.common.frequency import Frequency
  23. from taipy.config.common.scope import Scope
  24. from taipy.config.config import Config
  25. from taipy.core._core import Core
  26. from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
  27. from taipy.core._repository.db._sql_connection import _SQLConnection
  28. from taipy.core._version._version import _Version
  29. from taipy.core._version._version_manager_factory import _VersionManagerFactory
  30. from taipy.core.config import (
  31. _ConfigIdChecker,
  32. _CoreSectionChecker,
  33. _DataNodeConfigChecker,
  34. _JobConfigChecker,
  35. _ScenarioConfigChecker,
  36. _TaskConfigChecker,
  37. )
  38. from taipy.core.cycle._cycle_manager_factory import _CycleManagerFactory
  39. from taipy.core.cycle._cycle_model import _CycleModel
  40. from taipy.core.cycle.cycle import Cycle
  41. from taipy.core.cycle.cycle_id import CycleId
  42. from taipy.core.data._data_manager_factory import _DataManagerFactory
  43. from taipy.core.data._data_model import _DataNodeModel
  44. from taipy.core.data.in_memory import InMemoryDataNode
  45. from taipy.core.job._job_manager_factory import _JobManagerFactory
  46. from taipy.core.job.job import Job
  47. from taipy.core.job.job_id import JobId
  48. from taipy.core.notification.notifier import Notifier
  49. from taipy.core.scenario._scenario_manager_factory import _ScenarioManagerFactory
  50. from taipy.core.scenario._scenario_model import _ScenarioModel
  51. from taipy.core.scenario.scenario import Scenario
  52. from taipy.core.scenario.scenario_id import ScenarioId
  53. from taipy.core.sequence._sequence_manager_factory import _SequenceManagerFactory
  54. from taipy.core.sequence.sequence import Sequence
  55. from taipy.core.sequence.sequence_id import SequenceId
  56. from taipy.core.submission._submission_manager_factory import _SubmissionManagerFactory
  57. from taipy.core.submission.submission import Submission
  58. from taipy.core.task._task_manager_factory import _TaskManagerFactory
  59. from taipy.core.task.task import Task
# Single timestamp captured once at import time, so every fixture/entity that
# needs a "current" datetime shares the exact same value within a test session.
current_time = datetime.now()
  61. @pytest.fixture(scope="function")
  62. def csv_file(tmpdir_factory) -> str:
  63. csv = pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  64. fn = tmpdir_factory.mktemp("data").join("df.csv")
  65. csv.to_csv(str(fn), index=False)
  66. return fn.strpath
  67. @pytest.fixture(scope="function")
  68. def excel_file(tmpdir_factory) -> str:
  69. excel = pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  70. fn = tmpdir_factory.mktemp("data").join("df.xlsx")
  71. excel.to_excel(str(fn), index=False)
  72. return fn.strpath
  73. @pytest.fixture(scope="function")
  74. def excel_file_with_sheet_name(tmpdir_factory) -> str:
  75. excel = pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  76. fn = tmpdir_factory.mktemp("data").join("df.xlsx")
  77. excel.to_excel(str(fn), sheet_name="sheet_name", index=False)
  78. return fn.strpath
  79. @pytest.fixture(scope="function")
  80. def json_file(tmpdir_factory) -> str:
  81. json_data = pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  82. fn = tmpdir_factory.mktemp("data").join("df.json")
  83. json_data.to_json(str(fn), orient="records")
  84. return fn.strpath
  85. @pytest.fixture(scope="function")
  86. def excel_file_with_multi_sheet(tmpdir_factory) -> str:
  87. excel_multi_sheet = {
  88. "Sheet1": pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}]),
  89. "Sheet2": pd.DataFrame([{"a": 7, "b": 8, "c": 9}, {"a": 10, "b": 11, "c": 12}]),
  90. }
  91. fn = tmpdir_factory.mktemp("data").join("df.xlsx")
  92. with pd.ExcelWriter(str(fn)) as writer:
  93. for key in excel_multi_sheet.keys():
  94. excel_multi_sheet[key].to_excel(writer, key, index=False)
  95. return fn.strpath
  96. @pytest.fixture(scope="function")
  97. def pickle_file_path(tmpdir_factory) -> str:
  98. data = pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  99. fn = tmpdir_factory.mktemp("data").join("df.p")
  100. with open(str(fn), "wb") as f:
  101. pickle.dump(data, f)
  102. return fn.strpath
  103. @pytest.fixture(scope="function")
  104. def parquet_file_path(tmpdir_factory) -> str:
  105. data = pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  106. fn = tmpdir_factory.mktemp("data").join("df.parquet")
  107. data.to_parquet(str(fn))
  108. return fn.strpath
  109. @pytest.fixture(scope="function")
  110. def tmp_sqlite_db_file_path(tmpdir_factory):
  111. fn = tmpdir_factory.mktemp("data")
  112. db_name = "df"
  113. file_extension = ".db"
  114. db = create_engine("sqlite:///" + os.path.join(fn.strpath, f"{db_name}{file_extension}"))
  115. conn = db.connect()
  116. conn.execute(text("CREATE TABLE example (foo int, bar int);"))
  117. conn.execute(text("INSERT INTO example (foo, bar) VALUES (1, 2);"))
  118. conn.execute(text("INSERT INTO example (foo, bar) VALUES (3, 4);"))
  119. conn.commit()
  120. conn.close()
  121. db.dispose()
  122. return fn.strpath, db_name, file_extension
  123. @pytest.fixture(scope="function")
  124. def tmp_sqlite_sqlite3_file_path(tmpdir_factory):
  125. fn = tmpdir_factory.mktemp("data")
  126. db_name = "df"
  127. file_extension = ".sqlite3"
  128. db = create_engine("sqlite:///" + os.path.join(fn.strpath, f"{db_name}{file_extension}"))
  129. conn = db.connect()
  130. conn.execute(text("CREATE TABLE example (foo int, bar int);"))
  131. conn.execute(text("INSERT INTO example (foo, bar) VALUES (1, 2);"))
  132. conn.execute(text("INSERT INTO example (foo, bar) VALUES (3, 4);"))
  133. conn.commit()
  134. conn.close()
  135. db.dispose()
  136. return fn.strpath, db_name, file_extension
  137. @pytest.fixture(scope="function")
  138. def default_data_frame():
  139. return pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}])
  140. @pytest.fixture(scope="function")
  141. def default_multi_sheet_data_frame():
  142. return {
  143. "Sheet1": pd.DataFrame([{"a": 1, "b": 2, "c": 3}, {"a": 4, "b": 5, "c": 6}]),
  144. "Sheet2": pd.DataFrame([{"a": 7, "b": 8, "c": 9}, {"a": 10, "b": 11, "c": 12}]),
  145. }
  146. @pytest.fixture(scope="session", autouse=True)
  147. def cleanup_files():
  148. yield
  149. if os.path.exists(".data"):
  150. shutil.rmtree(".data", ignore_errors=True)
  151. if os.path.exists("user_data"):
  152. shutil.rmtree("user_data", ignore_errors=True)
  153. if os.path.exists(".taipy"):
  154. shutil.rmtree(".taipy", ignore_errors=True)
  155. if os.path.exists(".my_data"):
  156. shutil.rmtree(".my_data", ignore_errors=True)
  157. @pytest.fixture(scope="function")
  158. def current_datetime():
  159. return current_time
  160. @pytest.fixture(scope="function")
  161. def scenario(cycle):
  162. return Scenario(
  163. "sc",
  164. set(),
  165. {},
  166. set(),
  167. ScenarioId("sc_id"),
  168. current_time,
  169. is_primary=False,
  170. tags={"foo"},
  171. version="random_version_number",
  172. cycle=None,
  173. )
  174. @pytest.fixture(scope="function")
  175. def data_node():
  176. return InMemoryDataNode("data_node_config_id", Scope.SCENARIO, version="random_version_number")
  177. @pytest.fixture(scope="function")
  178. def data_node_model():
  179. return _DataNodeModel(
  180. "my_dn_id",
  181. "test_data_node",
  182. Scope.SCENARIO,
  183. "csv",
  184. "name",
  185. "owner_id",
  186. list({"parent_id_1", "parent_id_2"}),
  187. datetime(1985, 10, 14, 2, 30, 0).isoformat(),
  188. [dict(timestamp=datetime(1985, 10, 14, 2, 30, 0).isoformat(), job_id="job_id")],
  189. "latest",
  190. None,
  191. None,
  192. False,
  193. {"path": "/path", "has_header": True, "prop": "ENV[FOO]", "exposed_type": "pandas"},
  194. )
  195. @pytest.fixture(scope="function")
  196. def task(data_node):
  197. dn = InMemoryDataNode("dn_config_id", Scope.SCENARIO, version="random_version_number")
  198. return Task("task_config_id", {}, print, [data_node], [dn])
  199. @pytest.fixture(scope="function")
  200. def scenario_model(cycle):
  201. return _ScenarioModel(
  202. ScenarioId("sc_id"),
  203. "sc",
  204. set(),
  205. set(),
  206. {},
  207. creation_date=current_time.isoformat(),
  208. primary_scenario=False,
  209. subscribers=[],
  210. tags=["foo"],
  211. version="random_version_number",
  212. cycle=None,
  213. )
  214. @pytest.fixture(scope="function")
  215. def cycle():
  216. example_date = datetime.fromisoformat("2021-11-11T11:11:01.000001")
  217. return Cycle(
  218. Frequency.DAILY,
  219. {},
  220. creation_date=example_date,
  221. start_date=example_date,
  222. end_date=example_date,
  223. name="cc",
  224. id=CycleId("cc_id"),
  225. )
  226. @pytest.fixture(scope="class")
  227. def sequence():
  228. return Sequence(
  229. {},
  230. [],
  231. SequenceId("sequence_id"),
  232. owner_id="owner_id",
  233. parent_ids=set(["parent_id_1", "parent_id_2"]),
  234. version="random_version_number",
  235. )
  236. @pytest.fixture(scope="function")
  237. def job(task):
  238. return Job(JobId("job"), task, "foo", "bar", version="random_version_number")
  239. @pytest.fixture(scope="function")
  240. def submission(task):
  241. return Submission(task.id, task._ID_PREFIX, task.config_id, properties={})
  242. @pytest.fixture(scope="function")
  243. def _version():
  244. return _Version(id="foo", config=Config._applied_config)
  245. @pytest.fixture(scope="function")
  246. def cycle_model():
  247. return _CycleModel(
  248. CycleId("cc_id"),
  249. "cc",
  250. Frequency.DAILY,
  251. {},
  252. creation_date="2021-11-11T11:11:01.000001",
  253. start_date="2021-11-11T11:11:01.000001",
  254. end_date="2021-11-11T11:11:01.000001",
  255. )
  256. @pytest.fixture(scope="function")
  257. def tmp_sqlite(tmpdir_factory):
  258. fn = tmpdir_factory.mktemp("db")
  259. return os.path.join(fn.strpath, "test.db")
@pytest.fixture(scope="function", autouse=True)
def clean_repository(init_config, init_managers, init_orchestrator, init_notifier):
    # Autouse fixture: gives every test a pristine configuration, empty entity
    # repositories, a fresh orchestrator, and an empty notifier registry.
    close_all_sessions()
    init_config()
    init_orchestrator()
    init_managers()
    # NOTE(review): init_config() is called a second time after the managers are
    # rebuilt — presumably to reset any Config state the manager builds touched;
    # confirm before removing the apparent duplicate.
    init_config()
    init_notifier()
    # Patch argv so code under test that inspects the command line does not see
    # pytest's own arguments while the test body runs.
    with patch("sys.argv", ["prog"]):
        yield
  270. @pytest.fixture
  271. def init_config(reset_configuration_singleton, inject_core_sections):
  272. def _init_config():
  273. reset_configuration_singleton()
  274. inject_core_sections()
  275. _Checker.add_checker(_ConfigIdChecker)
  276. _Checker.add_checker(_CoreSectionChecker)
  277. _Checker.add_checker(_DataNodeConfigChecker)
  278. _Checker.add_checker(_JobConfigChecker)
  279. # We don't need to add _MigrationConfigChecker because it is run only when the Core service is run.
  280. _Checker.add_checker(_TaskConfigChecker)
  281. _Checker.add_checker(_ScenarioConfigChecker)
  282. Config.configure_core(read_entity_retry=0)
  283. Core._is_running = False
  284. Core._version_is_initialized = False
  285. return _init_config
  286. @pytest.fixture
  287. def init_managers():
  288. def _init_managers():
  289. _CycleManagerFactory._build_manager()._delete_all()
  290. _ScenarioManagerFactory._build_manager()._delete_all()
  291. _SequenceManagerFactory._build_manager()._delete_all()
  292. _JobManagerFactory._build_manager()._delete_all()
  293. _TaskManagerFactory._build_manager()._delete_all()
  294. _DataManagerFactory._build_manager()._delete_all()
  295. _VersionManagerFactory._build_manager()._delete_all()
  296. _SubmissionManagerFactory._build_manager()._delete_all()
  297. return _init_managers
  298. @pytest.fixture
  299. def init_orchestrator():
  300. def _init_orchestrator():
  301. _OrchestratorFactory._remove_dispatcher()
  302. if _OrchestratorFactory._orchestrator is None:
  303. _OrchestratorFactory._build_orchestrator()
  304. _OrchestratorFactory._build_dispatcher(force_restart=True)
  305. _OrchestratorFactory._orchestrator.jobs_to_run = Queue()
  306. _OrchestratorFactory._orchestrator.blocked_jobs = []
  307. return _init_orchestrator
  308. @pytest.fixture
  309. def init_notifier():
  310. def _init_notifier():
  311. Notifier._topics_registrations_list = {}
  312. return _init_notifier
  313. @pytest.fixture
  314. def sql_engine():
  315. return create_engine("sqlite:///:memory:")
@pytest.fixture
def init_sql_repo(tmp_sqlite, init_managers):
    # Switch the core repository to the SQL backend, pointed at a fresh
    # temporary SQLite file supplied by the tmp_sqlite fixture.
    Config.configure_core(repository_type="sql", repository_properties={"db_location": tmp_sqlite})
    # Clean SQLite database: drop any cached connection so init_db() opens the
    # new db_location rather than reusing a connection to a previous file.
    if _SQLConnection._connection:
        _SQLConnection._connection.close()
    _SQLConnection._connection = None
    _SQLConnection.init_db()
    # Managers must be emptied after the repository switch so the deletes hit
    # the SQL store just configured above.
    init_managers()
    return tmp_sqlite