conftest.py (8.9 KB)
  1. # Copyright 2021-2024 Avaiga Private Limited
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
  4. # the License. You may obtain a copy of the License at
  5. #
  6. # http://www.apache.org/licenses/LICENSE-2.0
  7. #
  8. # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
  9. # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
  10. # specific language governing permissions and limitations under the License.
  11. import os
  12. import shutil
  13. import uuid
  14. from datetime import datetime, timedelta
  15. from queue import Queue
  16. import pandas as pd
  17. import pytest
  18. from dotenv import load_dotenv
  19. from taipy.common.config import Config
  20. from taipy.common.config.common.frequency import Frequency
  21. from taipy.common.config.common.scope import Scope
  22. from taipy.core import Cycle, DataNodeId, Job, JobId, Scenario, Sequence, Task
  23. from taipy.core._orchestrator._orchestrator_factory import _OrchestratorFactory
  24. from taipy.core.cycle._cycle_manager import _CycleManager
  25. from taipy.core.data.pickle import PickleDataNode
  26. from taipy.core.job._job_manager import _JobManager
  27. from taipy.core.task._task_manager import _TaskManager
  28. from taipy.rest.app import create_app
  29. from .setup.shared.algorithms import evaluate, forecast
  30. @pytest.fixture
  31. def setup_end_to_end():
  32. model_cfg = Config.configure_data_node("model", path="setup/my_model.p", storage_type="pickle")
  33. day_cfg = Config.configure_data_node(id="day")
  34. forecasts_cfg = Config.configure_data_node(id="forecasts")
  35. forecast_task_cfg = Config.configure_task(
  36. id="forecast_task",
  37. input=[model_cfg, day_cfg],
  38. function=forecast,
  39. output=forecasts_cfg,
  40. )
  41. historical_temperature_cfg = Config.configure_data_node(
  42. "historical_temperature",
  43. storage_type="csv",
  44. path="setup/historical_temperature.csv",
  45. has_header=True,
  46. )
  47. evaluation_cfg = Config.configure_data_node("evaluation")
  48. evaluate_task_cfg = Config.configure_task(
  49. "evaluate_task",
  50. input=[historical_temperature_cfg, forecasts_cfg, day_cfg],
  51. function=evaluate,
  52. output=evaluation_cfg,
  53. )
  54. scenario_config = Config.configure_scenario(
  55. "scenario", [forecast_task_cfg, evaluate_task_cfg], frequency=Frequency.DAILY
  56. )
  57. scenario_config.add_sequences({"sequence": [forecast_task_cfg, evaluate_task_cfg]})
  58. @pytest.fixture()
  59. def app():
  60. load_dotenv(".testenv")
  61. app = create_app(testing=True)
  62. app.config.update(
  63. {
  64. "TESTING": True,
  65. }
  66. )
  67. with app.app_context(), app.test_request_context():
  68. yield app
  69. @pytest.fixture()
  70. def client(app):
  71. return app.test_client()
  72. @pytest.fixture
  73. def datanode_data():
  74. return {
  75. "name": "foo",
  76. "storage_type": "in_memory",
  77. "scope": "scenario",
  78. "default_data": ["1991-01-01T00:00:00"],
  79. }
  80. @pytest.fixture
  81. def task_data():
  82. return {
  83. "config_id": "foo",
  84. "input_ids": ["DATASOURCE_foo_3b888e17-1974-4a56-a42c-c7c96bc9cd54"],
  85. "function_name": "print",
  86. "function_module": "builtins",
  87. "output_ids": ["DATASOURCE_foo_4d9923b8-eb9f-4f3c-8055-3a1ce8bee309"],
  88. }
  89. @pytest.fixture
  90. def sequence_data():
  91. return {
  92. "name": "foo",
  93. "task_ids": ["TASK_foo_3b888e17-1974-4a56-a42c-c7c96bc9cd54"],
  94. }
  95. @pytest.fixture
  96. def scenario_data():
  97. return {
  98. "name": "foo",
  99. "sequence_ids": ["SEQUENCE_foo_3b888e17-1974-4a56-a42c-c7c96bc9cd54"],
  100. "properties": {},
  101. }
  102. @pytest.fixture
  103. def default_datanode():
  104. return PickleDataNode(
  105. "input_ds",
  106. Scope.SCENARIO,
  107. DataNodeId("f"),
  108. "owner_id",
  109. None,
  110. properties={"default_data": [1, 2, 3, 4, 5, 6]},
  111. )
  112. @pytest.fixture
  113. def default_df_datanode():
  114. return PickleDataNode(
  115. "input_ds",
  116. Scope.SCENARIO,
  117. DataNodeId("id_uio2"),
  118. "owner_id",
  119. None,
  120. properties={"default_data": pd.DataFrame([{"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}])},
  121. )
  122. @pytest.fixture
  123. def default_datanode_config():
  124. return Config.configure_data_node(f"taipy_{uuid.uuid4().hex}", "in_memory", Scope.SCENARIO)
  125. @pytest.fixture
  126. def default_datanode_config_list():
  127. configs = []
  128. for i in range(10):
  129. configs.append(Config.configure_data_node(id=f"ds_{i}", storage_type="in_memory", scope=Scope.SCENARIO))
  130. return configs
  131. def __default_task():
  132. input_ds = PickleDataNode(
  133. "input_ds",
  134. Scope.SCENARIO,
  135. DataNodeId("id_uio"),
  136. "owner_id",
  137. {"TASK_task_id"},
  138. properties={"default_data": "In memory Data Source"},
  139. )
  140. output_ds = PickleDataNode(
  141. "output_ds",
  142. Scope.SCENARIO,
  143. DataNodeId("id_uio"),
  144. "owner_id",
  145. {"TASK_task_id"},
  146. properties={"default_data": "In memory Data Source"},
  147. )
  148. return Task(
  149. config_id="foo",
  150. properties={},
  151. function=print,
  152. input=[input_ds],
  153. output=[output_ds],
  154. id="TASK_task_id",
  155. )
  156. @pytest.fixture
  157. def default_task():
  158. return __default_task()
  159. @pytest.fixture
  160. def default_task_config():
  161. return Config.configure_task("task1", print, [], [])
  162. @pytest.fixture
  163. def default_task_config_list():
  164. configs = []
  165. for i in range(10):
  166. configs.append(Config.configure_task(f"task_{i}", print, [], []))
  167. return configs
  168. def __default_sequence():
  169. return Sequence(properties={"name": "foo"}, tasks=[__default_task()], sequence_id="SEQUENCE_foo_SCENARIO_acb")
  170. def __task_config():
  171. return Config.configure_task("task1", print, [], [])
  172. @pytest.fixture
  173. def default_sequence():
  174. return __default_sequence()
  175. @pytest.fixture
  176. def default_scenario_config():
  177. task_config = __task_config()
  178. scenario_config = Config.configure_scenario(
  179. f"taipy_{uuid.uuid4().hex}",
  180. [task_config],
  181. )
  182. scenario_config.add_sequences({"sequence": [task_config]})
  183. return scenario_config
  184. @pytest.fixture
  185. def default_scenario_config_list():
  186. configs = []
  187. for _ in range(10):
  188. task_config = Config.configure_task(f"taipy_{uuid.uuid4().hex}", print)
  189. scenario_config = Config.configure_scenario(
  190. f"taipy_{uuid.uuid4().hex}",
  191. [task_config],
  192. )
  193. scenario_config.add_sequences({"sequence": [task_config]})
  194. configs.append(scenario_config)
  195. return configs
  196. @pytest.fixture
  197. def default_scenario():
  198. return Scenario(config_id="foo", properties={}, tasks=[__default_task()], scenario_id="SCENARIO_scenario_id")
  199. def __create_cycle(name="foo"):
  200. now = datetime.now()
  201. return Cycle(
  202. name=name,
  203. frequency=Frequency.DAILY,
  204. properties={},
  205. creation_date=now,
  206. start_date=now,
  207. end_date=now + timedelta(days=5),
  208. )
  209. @pytest.fixture
  210. def create_cycle_list():
  211. cycles = []
  212. manager = _CycleManager
  213. for i in range(10):
  214. c = __create_cycle(f"cycle_{i}")
  215. manager._set(c)
  216. return cycles
  217. @pytest.fixture
  218. def cycle_data():
  219. return {
  220. "name": "foo",
  221. "frequency": "daily",
  222. "properties": {},
  223. "creation_date": "2022-02-03T22:17:27.317114",
  224. "start_date": "2022-02-03T22:17:27.317114",
  225. "end_date": "2022-02-08T22:17:27.317114",
  226. }
  227. @pytest.fixture
  228. def default_cycle():
  229. return __create_cycle()
  230. def __create_job():
  231. task_manager = _TaskManager
  232. task = __default_task()
  233. task_manager._set(task)
  234. submit_id = f"SUBMISSION_{str(uuid.uuid4())}"
  235. return Job(id=JobId(f"JOB_{uuid.uuid4()}"), task=task, submit_id=submit_id, submit_entity_id=task.id)
  236. @pytest.fixture
  237. def default_job():
  238. return __create_job()
  239. @pytest.fixture
  240. def create_job_list():
  241. jobs = []
  242. manager = _JobManager
  243. for _ in range(10):
  244. c = __create_job()
  245. manager._set(c)
  246. return jobs
  247. @pytest.fixture(scope="function", autouse=True)
  248. def cleanup_files(reset_configuration_singleton, inject_core_sections):
  249. reset_configuration_singleton()
  250. inject_core_sections()
  251. Config.configure_core(repository_type="filesystem")
  252. if os.path.exists(".data"):
  253. shutil.rmtree(".data", ignore_errors=True)
  254. if os.path.exists(".my_data"):
  255. shutil.rmtree(".my_data", ignore_errors=True)
  256. yield
  257. for path in [".data", ".my_data", "user_data", ".taipy"]:
  258. if os.path.exists(path):
  259. shutil.rmtree(path, ignore_errors=True)
@pytest.fixture
def init_orchestrator():
    """Provide a callable that rebuilds the orchestrator with empty job queues.

    The returned function, in order:
      1. removes any existing dispatcher,
      2. lazily builds the orchestrator singleton if absent,
      3. force-restarts the dispatcher,
      4. replaces the pending-job queue and blocked-job list with empty ones.
    """
    def _init_orchestrator():
        _OrchestratorFactory._remove_dispatcher()
        if _OrchestratorFactory._orchestrator is None:
            _OrchestratorFactory._build_orchestrator()
        _OrchestratorFactory._build_dispatcher(force_restart=True)
        # Reset both job containers so each test starts from a clean state.
        _OrchestratorFactory._orchestrator.jobs_to_run = Queue()
        _OrchestratorFactory._orchestrator.blocked_jobs = []
    return _init_orchestrator