# conftest.py
# Copyright 2021-2025 Avaiga Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import os
import shutil
import uuid
from datetime import datetime, timedelta
from queue import Queue

import pandas as pd
import pytest
from dotenv import load_dotenv

from taipy.common.config import Config
from taipy.common.config.checker._checker import _Checker
from taipy.core import Cycle, DataNodeId, Job, JobId, Scenario, Sequence, Task
from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
from taipy.core.common.frequency import Frequency
from taipy.core.common.scope import Scope
from taipy.core.cycle._cycle_manager import _CycleManager
from taipy.core.data._data_manager_factory import _DataManagerFactory
from taipy.core.data.pickle import PickleDataNode
from taipy.core.job._job_manager import _JobManager
from taipy.core.task._task_manager import _TaskManager
from taipy.rest.app import create_app
from taipy.rest.config import _RestConfigChecker

from .setup.shared.algorithms import evaluate, forecast
  33. @pytest.fixture
  34. def setup_end_to_end():
  35. model_cfg = Config.configure_data_node("model", path="setup/my_model.p", storage_type="pickle")
  36. day_cfg = Config.configure_data_node(id="day")
  37. forecasts_cfg = Config.configure_data_node(id="forecasts")
  38. forecast_task_cfg = Config.configure_task(
  39. id="forecast_task",
  40. input=[model_cfg, day_cfg],
  41. function=forecast,
  42. output=forecasts_cfg,
  43. )
  44. historical_temperature_cfg = Config.configure_data_node(
  45. "historical_temperature",
  46. storage_type="csv",
  47. path="setup/historical_temperature.csv",
  48. has_header=True,
  49. )
  50. evaluation_cfg = Config.configure_data_node("evaluation")
  51. evaluate_task_cfg = Config.configure_task(
  52. "evaluate_task",
  53. input=[historical_temperature_cfg, forecasts_cfg, day_cfg],
  54. function=evaluate,
  55. output=evaluation_cfg,
  56. )
  57. scenario_config = Config.configure_scenario(
  58. "scenario", [forecast_task_cfg, evaluate_task_cfg], frequency=Frequency.DAILY
  59. )
  60. scenario_config.add_sequences({"sequence": [forecast_task_cfg, evaluate_task_cfg]})
  61. @pytest.fixture()
  62. def app():
  63. load_dotenv(".testenv")
  64. app = create_app(testing=True)
  65. app.config.update(
  66. {
  67. "TESTING": True,
  68. }
  69. )
  70. with app.app_context(), app.test_request_context():
  71. yield app
  72. @pytest.fixture()
  73. def client(app):
  74. return app.test_client()
  75. @pytest.fixture
  76. def datanode_data():
  77. return {
  78. "name": "foo",
  79. "storage_type": "in_memory",
  80. "scope": "scenario",
  81. "default_data": ["1991-01-01T00:00:00"],
  82. }
  83. @pytest.fixture
  84. def task_data():
  85. return {
  86. "config_id": "foo",
  87. "input_ids": ["DATASOURCE_foo_3b888e17-1974-4a56-a42c-c7c96bc9cd54"],
  88. "function_name": "print",
  89. "function_module": "builtins",
  90. "output_ids": ["DATASOURCE_foo_4d9923b8-eb9f-4f3c-8055-3a1ce8bee309"],
  91. }
  92. @pytest.fixture
  93. def sequence_data():
  94. return {
  95. "name": "foo",
  96. "task_ids": ["TASK_foo_3b888e17-1974-4a56-a42c-c7c96bc9cd54"],
  97. }
  98. @pytest.fixture
  99. def scenario_data():
  100. return {
  101. "name": "foo",
  102. "sequence_ids": ["SEQUENCE_foo_3b888e17-1974-4a56-a42c-c7c96bc9cd54"],
  103. "properties": {},
  104. }
  105. @pytest.fixture
  106. def default_datanode():
  107. return PickleDataNode(
  108. "input_ds",
  109. Scope.SCENARIO,
  110. DataNodeId("f"),
  111. "owner_id",
  112. None,
  113. properties={"default_data": [1, 2, 3, 4, 5, 6]},
  114. )
  115. @pytest.fixture
  116. def default_df_datanode():
  117. return PickleDataNode(
  118. "input_ds",
  119. Scope.SCENARIO,
  120. DataNodeId("id_uio2"),
  121. "owner_id",
  122. None,
  123. properties={"default_data": pd.DataFrame([{"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}])},
  124. )
  125. @pytest.fixture
  126. def default_datanode_config():
  127. return Config.configure_data_node(f"taipy_{uuid.uuid4().hex}", "in_memory", Scope.SCENARIO)
  128. @pytest.fixture
  129. def default_datanode_config_list():
  130. configs = []
  131. for i in range(10):
  132. configs.append(Config.configure_data_node(id=f"ds_{i}", storage_type="in_memory", scope=Scope.SCENARIO))
  133. return configs
  134. def __default_task():
  135. input_ds = PickleDataNode(
  136. "input_ds",
  137. Scope.SCENARIO,
  138. DataNodeId("id_uio"),
  139. "owner_id",
  140. {"TASK_task_id"},
  141. properties={"default_data": "In memory Data Source"},
  142. )
  143. _DataManagerFactory._build_manager()._repository._save(input_ds)
  144. output_ds = PickleDataNode(
  145. "output_ds",
  146. Scope.SCENARIO,
  147. DataNodeId("id_uio"),
  148. "owner_id",
  149. {"TASK_task_id"},
  150. properties={"default_data": "In memory Data Source"},
  151. )
  152. _DataManagerFactory._build_manager()._repository._save(output_ds)
  153. return Task(
  154. config_id="foo",
  155. properties={},
  156. function=print,
  157. input=[input_ds],
  158. output=[output_ds],
  159. id="TASK_task_id",
  160. )
  161. @pytest.fixture
  162. def default_task():
  163. return __default_task()
  164. @pytest.fixture
  165. def default_task_config():
  166. return Config.configure_task("task1", print, [], [])
  167. @pytest.fixture
  168. def default_task_config_list():
  169. configs = []
  170. for i in range(10):
  171. configs.append(Config.configure_task(f"task_{i}", print, [], []))
  172. return configs
  173. def __default_sequence():
  174. return Sequence(properties={"name": "foo"}, tasks=[__default_task()], sequence_id="SEQUENCE_foo_SCENARIO_acb")
  175. def __task_config():
  176. return Config.configure_task("task1", print, [], [])
  177. @pytest.fixture
  178. def default_sequence():
  179. return __default_sequence()
  180. @pytest.fixture
  181. def default_scenario_config():
  182. task_config = __task_config()
  183. scenario_config = Config.configure_scenario(
  184. f"taipy_{uuid.uuid4().hex}",
  185. [task_config],
  186. )
  187. scenario_config.add_sequences({"sequence": [task_config]})
  188. return scenario_config
  189. @pytest.fixture
  190. def default_scenario_config_list():
  191. configs = []
  192. for _ in range(10):
  193. task_config = Config.configure_task(f"taipy_{uuid.uuid4().hex}", print)
  194. scenario_config = Config.configure_scenario(
  195. f"taipy_{uuid.uuid4().hex}",
  196. [task_config],
  197. )
  198. scenario_config.add_sequences({"sequence": [task_config]})
  199. configs.append(scenario_config)
  200. return configs
  201. @pytest.fixture
  202. def default_scenario():
  203. return Scenario(config_id="foo", properties={}, tasks=[__default_task()], scenario_id="SCENARIO_scenario_id")
  204. def __create_cycle(name="foo"):
  205. now = datetime.now()
  206. return Cycle(
  207. name=name,
  208. frequency=Frequency.DAILY,
  209. properties={},
  210. creation_date=now,
  211. start_date=now,
  212. end_date=now + timedelta(days=5),
  213. )
  214. @pytest.fixture
  215. def create_cycle_list():
  216. cycles = []
  217. manager = _CycleManager
  218. for i in range(10):
  219. c = __create_cycle(f"cycle_{i}")
  220. manager._repository._save(c)
  221. return cycles
  222. @pytest.fixture
  223. def cycle_data():
  224. return {
  225. "name": "foo",
  226. "frequency": "daily",
  227. "properties": {},
  228. "creation_date": "2022-02-03T22:17:27.317114",
  229. "start_date": "2022-02-03T22:17:27.317114",
  230. "end_date": "2022-02-08T22:17:27.317114",
  231. }
  232. @pytest.fixture
  233. def default_cycle():
  234. return __create_cycle()
  235. def __create_job():
  236. task_manager = _TaskManager
  237. task = __default_task()
  238. task_manager._repository._save(task)
  239. submit_id = f"SUBMISSION_{str(uuid.uuid4())}"
  240. return Job(id=JobId(f"JOB_{uuid.uuid4()}"), task=task, submit_id=submit_id, submit_entity_id=task.id)
  241. @pytest.fixture
  242. def default_job():
  243. return __create_job()
  244. @pytest.fixture
  245. def create_job_list():
  246. jobs = []
  247. manager = _JobManager
  248. for _ in range(10):
  249. c = __create_job()
  250. manager._repository._save(c)
  251. return jobs
  252. @pytest.fixture
  253. def init_orchestrator():
  254. def _init_orchestrator():
  255. _OrchestratorFactory._remove_dispatcher()
  256. if _OrchestratorFactory._orchestrator is None:
  257. _OrchestratorFactory._build_orchestrator()
  258. _OrchestratorFactory._build_dispatcher(force_restart=True)
  259. _OrchestratorFactory._orchestrator.jobs_to_run = Queue()
  260. _OrchestratorFactory._orchestrator.blocked_jobs = []
  261. return _init_orchestrator
  262. @pytest.fixture(scope="function", autouse=True)
  263. def cleanup_files(reset_configuration_singleton, inject_rest_sections, inject_core_sections):
  264. reset_configuration_singleton()
  265. inject_core_sections()
  266. inject_rest_sections()
  267. _Checker.add_checker(_RestConfigChecker)
  268. Config.configure_core(repository_type="filesystem")
  269. if os.path.exists(".data"):
  270. shutil.rmtree(".data", ignore_errors=True)
  271. if os.path.exists(".my_data"):
  272. shutil.rmtree(".my_data", ignore_errors=True)
  273. yield
  274. for path in [".data", ".my_data", "user_data", ".taipy"]:
  275. if os.path.exists(path):
  276. shutil.rmtree(path, ignore_errors=True)