test_config_serialization.py 30 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801
  1. # Copyright 2023 Avaiga Private Limited
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
  4. # the License. You may obtain a copy of the License at
  5. #
  6. # http://www.apache.org/licenses/LICENSE-2.0
  7. #
  8. # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
  9. # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
  10. # specific language governing permissions and limitations under the License.
  11. import datetime
  12. import json
  13. from src.taipy.core.config import CoreSection, DataNodeConfig, JobConfig, MigrationConfig, ScenarioConfig, TaskConfig
  14. from taipy.config import Config
  15. from taipy.config._serializer._json_serializer import _JsonSerializer
  16. from taipy.config.common.frequency import Frequency
  17. from taipy.config.common.scope import Scope
  18. from tests.core.utils.named_temporary_file import NamedTemporaryFile
  19. def multiply(a):
  20. return a * 2
  21. def migrate_csv_path(dn):
  22. dn.path = "foo.csv"
  23. def compare_function(*data_node_results):
  24. comparison_result = {}
  25. current_result_index = 0
  26. for current_result in data_node_results:
  27. comparison_result[current_result_index] = {}
  28. next_result_index = 0
  29. for next_result in data_node_results:
  30. print(f"comparing result {current_result_index} with result {next_result_index}")
  31. comparison_result[current_result_index][next_result_index] = next_result - current_result
  32. next_result_index += 1
  33. current_result_index += 1
  34. return comparison_result
  35. class CustomClass:
  36. a = None
  37. b = None
  38. class CustomEncoder(json.JSONEncoder):
  39. def default(self, o):
  40. if isinstance(o, datetime):
  41. result = {"__type__": "Datetime", "__value__": o.isoformat()}
  42. else:
  43. result = json.JSONEncoder.default(self, o)
  44. return result
  45. class CustomDecoder(json.JSONDecoder):
  46. def __init__(self, *args, **kwargs):
  47. json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)
  48. def object_hook(self, source):
  49. if source.get("__type__") == "Datetime":
  50. return datetime.fromisoformat(source.get("__value__"))
  51. else:
  52. return source
  53. def config_test_scenario():
  54. test_csv_dn_cfg = Config.configure_csv_data_node(
  55. id="test_csv_dn",
  56. path="./test.csv",
  57. exposed_type=CustomClass,
  58. scope=Scope.GLOBAL,
  59. validity_period=datetime.timedelta(1),
  60. )
  61. test_json_dn_cfg = Config.configure_json_data_node(
  62. id="test_json_dn",
  63. default_path="./test.json",
  64. encoder=CustomEncoder,
  65. decoder=CustomDecoder,
  66. )
  67. test_pickle_dn_cfg = Config.configure_pickle_data_node(
  68. id="test_pickle_dn",
  69. path="./test.p",
  70. scope=Scope.SCENARIO,
  71. validity_period=datetime.timedelta(1),
  72. )
  73. test_task_cfg = Config.configure_task(
  74. id="test_task", input=test_csv_dn_cfg, function=multiply, output=test_json_dn_cfg
  75. )
  76. test_scenario_cfg = Config.configure_scenario(
  77. id="test_scenario",
  78. task_configs=[test_task_cfg],
  79. additional_data_node_configs=[test_pickle_dn_cfg],
  80. comparators={test_json_dn_cfg.id: compare_function},
  81. frequency=Frequency.DAILY,
  82. )
  83. test_scenario_cfg.add_sequences({"sequence1": [test_task_cfg]})
  84. Config.add_migration_function("1.0", test_csv_dn_cfg, migrate_csv_path)
  85. return test_scenario_cfg
def test_read_write_toml_configuration_file():
    """Round-trip the full scenario configuration through the TOML serializer.

    Backs up the in-memory Config to a temp TOML file, checks the dump matches
    the expected text exactly, reloads that file and dumps again (the second
    dump must be identical), then asserts every section and attribute survived
    the round trip.
    """
    # Expected TOML dump. Typed values are encoded as "value:type" strings;
    # cross-section references carry the ":SECTION" suffix.
    expected_toml_config = f"""
[TAIPY]
[JOB]
mode = "development"
max_nb_of_workers = "1:int"
[CORE]
root_folder = "./taipy/"
storage_folder = ".data/"
repository_type = "filesystem"
read_entity_retry = "0:int"
mode = "development"
version_number = ""
force = "False:bool"
core_version = "{CoreSection._CURRENT_CORE_VERSION}"
[DATA_NODE.default]
storage_type = "pickle"
scope = "SCENARIO:SCOPE"
[DATA_NODE.test_csv_dn]
storage_type = "csv"
scope = "GLOBAL:SCOPE"
validity_period = "1d0h0m0s:timedelta"
path = "./test.csv"
exposed_type = "tests.core.config.test_config_serialization.CustomClass:class"
encoding = "utf-8"
has_header = "True:bool"
[DATA_NODE.test_json_dn]
storage_type = "json"
scope = "SCENARIO:SCOPE"
default_path = "./test.json"
encoder = "tests.core.config.test_config_serialization.CustomEncoder:class"
decoder = "tests.core.config.test_config_serialization.CustomDecoder:class"
encoding = "utf-8"
[DATA_NODE.test_pickle_dn]
storage_type = "pickle"
scope = "SCENARIO:SCOPE"
validity_period = "1d0h0m0s:timedelta"
path = "./test.p"
[TASK.default]
inputs = []
outputs = []
skippable = "False:bool"
[TASK.test_task]
function = "tests.core.config.test_config_serialization.multiply:function"
inputs = [ "test_csv_dn:SECTION",]
outputs = [ "test_json_dn:SECTION",]
skippable = "False:bool"
[SCENARIO.default]
tasks = []
additional_data_nodes = []
[SCENARIO.test_scenario]
tasks = [ "test_task:SECTION",]
additional_data_nodes = [ "test_pickle_dn:SECTION",]
frequency = "DAILY:FREQUENCY"
[VERSION_MIGRATION.migration_fcts."1.0"]
test_csv_dn = "tests.core.config.test_config_serialization.migrate_csv_path:function"
[SCENARIO.default.comparators]
[SCENARIO.default.sequences]
[SCENARIO.test_scenario.comparators]
test_json_dn = [ "tests.core.config.test_config_serialization.compare_function:function",]
[SCENARIO.test_scenario.sequences]
sequence1 = [ "test_task:SECTION",]
""".strip()
    config_test_scenario()
    # First dump: in-memory Config -> TOML file.
    tf = NamedTemporaryFile()
    Config.backup(tf.filename)
    actual_config = tf.read().strip()
    assert actual_config == expected_toml_config
    # Reload the dump and dump again: the serializer must be stable.
    Config.load(tf.filename)
    tf2 = NamedTemporaryFile()
    Config.backup(tf2.filename)
    actual_config_2 = tf2.read().strip()
    assert actual_config_2 == expected_toml_config
    # Unique (singleton) sections after reload: JOB, CORE, VERSION_MIGRATION.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 3
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
    assert Config.unique_sections[MigrationConfig.name].migration_fcts["1.0"] == {"test_csv_dn": migrate_csv_path}
    # Repeatable sections: DATA_NODE, TASK, SCENARIO.
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 4
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].path == "./test.p"
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert not Config.sections[TaskConfig.name]["default"].skippable
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    # NOTE(review): duplicated assertion kept from the original; harmless.
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == [Config.sections[DataNodeConfig.name]["test_pickle_dn"].id]
    # data_nodes is the union of task inputs/outputs and additional data nodes.
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
        Config.sections[DataNodeConfig.name]["test_pickle_dn"].id,
    ]
    sequences = {}
    for sequence_name, sequence_tasks in Config.sections[ScenarioConfig.name]["test_scenario"].sequences.items():
        sequences[sequence_name] = [task.id for task in sequence_tasks]
    assert sequences == {"sequence1": [Config.sections[TaskConfig.name]["test_task"].id]}
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
def test_read_write_json_configuration_file():
    """Round-trip the full scenario configuration through the JSON serializer.

    Same shape as the TOML test: swap the Config serializer to JSON, back up,
    compare against the expected dump, reload, re-dump (must be identical),
    then assert every section and attribute survived the round trip.
    """
    # Expected JSON dump, assembled by concatenation so the dynamic core
    # version can be interpolated via the middle f-string piece.
    expected_json_config = (
        """{
"TAIPY": {},
"JOB": {
"mode": "development",
"max_nb_of_workers": "1:int"
},
"CORE": {
"root_folder": "./taipy/",
"storage_folder": ".data/",
"repository_type": "filesystem",
"read_entity_retry": "0:int",
"mode": "development",
"version_number": "",
"force": "False:bool","""
        + f"""
"core_version": "{CoreSection._CURRENT_CORE_VERSION}"
"""
        + """
},
"VERSION_MIGRATION": {
"migration_fcts": {
"1.0": {
"test_csv_dn": "tests.core.config.test_config_serialization.migrate_csv_path:function"
}
}
},
"DATA_NODE": {
"default": {
"storage_type": "pickle",
"scope": "SCENARIO:SCOPE"
},
"test_csv_dn": {
"storage_type": "csv",
"scope": "GLOBAL:SCOPE",
"validity_period": "1d0h0m0s:timedelta",
"path": "./test.csv",
"exposed_type": "tests.core.config.test_config_serialization.CustomClass:class",
"encoding": "utf-8",
"has_header": "True:bool"
},
"test_json_dn": {
"storage_type": "json",
"scope": "SCENARIO:SCOPE",
"default_path": "./test.json",
"encoder": "tests.core.config.test_config_serialization.CustomEncoder:class",
"decoder": "tests.core.config.test_config_serialization.CustomDecoder:class",
"encoding": "utf-8"
},
"test_pickle_dn": {
"storage_type": "pickle",
"scope": "SCENARIO:SCOPE",
"validity_period": "1d0h0m0s:timedelta",
"path": "./test.p"
}
},
"TASK": {
"default": {
"function": null,
"inputs": [],
"outputs": [],
"skippable": "False:bool"
},
"test_task": {
"function": "tests.core.config.test_config_serialization.multiply:function",
"inputs": [
"test_csv_dn:SECTION"
],
"outputs": [
"test_json_dn:SECTION"
],
"skippable": "False:bool"
},
"SCENARIO": {
"default": {
"comparators": {},
"tasks": [],
"additional_data_nodes": [],
"frequency": null,
"sequences": {}
},
"test_scenario": {
"comparators": {
"test_json_dn": [
"tests.core.config.test_config_serialization.compare_function:function"
]
},
"tasks": [
"test_task:SECTION"
],
"additional_data_nodes": [
"test_pickle_dn:SECTION"
],
"frequency": "DAILY:FREQUENCY",
"sequences": {
"sequence1": [
"test_task:SECTION"
]
}
}
}
}
""".strip()
    )
    # Switch the global Config serializer to JSON for this test.
    Config._serializer = _JsonSerializer()
    config_test_scenario()
    # First dump: in-memory Config -> JSON file.
    tf = NamedTemporaryFile()
    Config.backup(tf.filename)
    actual_config = tf.read().strip()
    assert actual_config == expected_json_config
    # Reload the dump and dump again: the serializer must be stable.
    Config.load(tf.filename)
    tf2 = NamedTemporaryFile()
    Config.backup(tf2.filename)
    actual_config_2 = tf2.read().strip()
    assert actual_config_2 == expected_json_config
    # Unique (singleton) sections after reload: JOB, CORE, VERSION_MIGRATION.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 3
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
    assert Config.unique_sections[MigrationConfig.name].migration_fcts["1.0"] == {"test_csv_dn": migrate_csv_path}
    # Repeatable sections: DATA_NODE, TASK, SCENARIO.
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 4
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].path == "./test.p"
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == [Config.sections[DataNodeConfig.name]["test_pickle_dn"].id]
    # data_nodes is the union of task inputs/outputs and additional data nodes.
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
        Config.sections[DataNodeConfig.name]["test_pickle_dn"].id,
    ]
    sequences = {}
    for sequence_name, sequence_tasks in Config.sections[ScenarioConfig.name]["test_scenario"].sequences.items():
        sequences[sequence_name] = [task.id for task in sequence_tasks]
    assert sequences == {"sequence1": [Config.sections[TaskConfig.name]["test_task"].id]}
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
def test_read_write_toml_configuration_file_migrate_sequence_in_scenario():
    """Restore a legacy TOML config (old inline-sequence scenario format).

    The old format declared ``sequences.test_sequence`` directly inside the
    SCENARIO table and had no additional_data_nodes; this verifies that
    Config.restore migrates it into the current model.
    """
    # Legacy on-disk TOML, written by an older taipy version.
    old_toml_config = """
[TAIPY]
[JOB]
mode = "development"
max_nb_of_workers = "1:int"
[CORE]
root_folder = "./taipy/"
storage_folder = ".data/"
repository_type = "filesystem"
mode = "development"
version_number = ""
force = "False:bool"
[DATA_NODE.default]
storage_type = "pickle"
scope = "SCENARIO:SCOPE"
[DATA_NODE.test_csv_dn]
storage_type = "csv"
scope = "GLOBAL:SCOPE"
validity_period = "1d0h0m0s:timedelta"
path = "./test.csv"
exposed_type = "tests.core.config.test_config_serialization.CustomClass:class"
has_header = "True:bool"
[DATA_NODE.test_json_dn]
storage_type = "json"
scope = "SCENARIO:SCOPE"
default_path = "./test.json"
encoder = "tests.core.config.test_config_serialization.CustomEncoder:class"
decoder = "tests.core.config.test_config_serialization.CustomDecoder:class"
[TASK.default]
inputs = []
outputs = []
skippable = "False:bool"
[TASK.test_task]
function = "tests.core.config.test_config_serialization.multiply:function"
inputs = [ "test_csv_dn:SECTION",]
outputs = [ "test_json_dn:SECTION",]
skippable = "False:bool"
[SCENARIO.default]
[SCENARIO.test_scenario]
tasks = [ "test_task:SECTION",]
sequences.test_sequence = [ "test_task:SECTION",]
frequency = "DAILY:FREQUENCY"
[VERSION_MIGRATION.migration_fcts."1.0"]
test_csv_dn = "tests.core.config.test_config_serialization.migrate_csv_path:function"
[SCENARIO.default.comparators]
[SCENARIO.test_scenario.comparators]
test_json_dn = [ "tests.core.config.test_config_serialization.compare_function:function",]
""".strip()
    config_test_scenario()
    # Write the legacy file to disk and restore the Config from it.
    tf = NamedTemporaryFile()
    with open(tf.filename, "w") as fd:
        fd.writelines(old_toml_config)
    Config.restore(tf.filename)
    # Unique (singleton) sections: CORE, JOB, VERSION_MIGRATION.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 3
    assert Config.unique_sections[CoreSection.name].root_folder == "./taipy/"
    assert Config.unique_sections[CoreSection.name].storage_folder == ".data/"
    assert Config.unique_sections[CoreSection.name].repository_type == "filesystem"
    assert Config.unique_sections[CoreSection.name].repository_properties == {}
    assert Config.unique_sections[CoreSection.name].mode == "development"
    assert Config.unique_sections[CoreSection.name].version_number == ""
    assert Config.unique_sections[CoreSection.name].force is False
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
    assert Config.unique_sections[MigrationConfig.name].migration_fcts["1.0"] == {"test_csv_dn": migrate_csv_path}
    # Repeatable sections: only 3 data node configs here (no pickle node in the
    # legacy file).
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 3
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert not Config.sections[TaskConfig.name]["default"].skippable
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    # NOTE(review): duplicated assertion kept from the original; harmless.
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    # Legacy format had no additional data nodes.
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == []
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
    ]
    # The inline sequences.test_sequence entry must migrate into .sequences.
    assert Config.sections[ScenarioConfig.name]["test_scenario"].sequences == {
        "test_sequence": [Config.sections[TaskConfig.name]["test_task"]]
    }
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
  537. def test_read_write_json_configuration_file_migrate_sequence_in_scenario():
  538. old_json_config = """
  539. {
  540. "TAIPY": {},
  541. "JOB": {
  542. "mode": "development",
  543. "max_nb_of_workers": "1:int"
  544. },
  545. "CORE": {
  546. "root_folder": "./taipy/",
  547. "storage_folder": ".data/",
  548. "repository_type": "filesystem",
  549. "read_entity_retry": "0:int",
  550. "mode": "development",
  551. "version_number": "",
  552. "force": "False:bool"
  553. },
  554. "VERSION_MIGRATION": {
  555. "migration_fcts": {
  556. "1.0": {
  557. "test_csv_dn": "tests.core.config.test_config_serialization.migrate_csv_path:function"
  558. }
  559. }
  560. },
  561. "DATA_NODE": {
  562. "default": {
  563. "storage_type": "pickle",
  564. "scope": "SCENARIO:SCOPE"
  565. },
  566. "test_csv_dn": {
  567. "storage_type": "csv",
  568. "scope": "GLOBAL:SCOPE",
  569. "validity_period": "1d0h0m0s:timedelta",
  570. "path": "./test.csv",
  571. "exposed_type": "tests.core.config.test_config_serialization.CustomClass:class",
  572. "has_header": "True:bool"
  573. },
  574. "test_json_dn": {
  575. "storage_type": "json",
  576. "scope": "SCENARIO:SCOPE",
  577. "default_path": "./test.json",
  578. "encoder": "tests.core.config.test_config_serialization.CustomEncoder:class",
  579. "decoder": "tests.core.config.test_config_serialization.CustomDecoder:class"
  580. }
  581. },
  582. "TASK": {
  583. "default": {
  584. "function": null,
  585. "inputs": [],
  586. "outputs": [],
  587. "skippable": "False:bool"
  588. },
  589. "test_task": {
  590. "function": "tests.core.config.test_config_serialization.multiply:function",
  591. "inputs": [
  592. "test_csv_dn:SECTION"
  593. ],
  594. "outputs": [
  595. "test_json_dn:SECTION"
  596. ],
  597. "skippable": "False:bool"
  598. }
  599. },
  600. "SCENARIO": {
  601. "default": {
  602. "comparators": {},
  603. "sequences": {},
  604. "frequency": null
  605. },
  606. "test_scenario": {
  607. "comparators": {
  608. "test_json_dn": [
  609. "tests.core.config.test_config_serialization.compare_function:function"
  610. ]
  611. },
  612. "tasks": [
  613. "test_task:SECTION"
  614. ],
  615. "sequences": {
  616. "test_sequence": [
  617. "test_task:SECTION"
  618. ]
  619. },
  620. "frequency": "DAILY:FREQUENCY"
  621. }
  622. }
  623. }
  624. """.strip()
  625. Config._serializer = _JsonSerializer()
  626. config_test_scenario()
  627. tf = NamedTemporaryFile()
  628. with open(tf.filename, "w") as fd:
  629. fd.writelines(old_json_config)
  630. Config.restore(tf.filename)
  631. assert Config.unique_sections is not None
  632. assert len(Config.unique_sections) == 3
  633. assert Config.unique_sections[CoreSection.name].root_folder == "./taipy/"
  634. assert Config.unique_sections[CoreSection.name].storage_folder == ".data/"
  635. assert Config.unique_sections[CoreSection.name].repository_type == "filesystem"
  636. assert Config.unique_sections[CoreSection.name].repository_properties == {}
  637. assert Config.unique_sections[CoreSection.name].mode == "development"
  638. assert Config.unique_sections[CoreSection.name].version_number == ""
  639. assert Config.unique_sections[CoreSection.name].force is False
  640. assert Config.unique_sections[JobConfig.name].mode == "development"
  641. assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
  642. assert Config.unique_sections[MigrationConfig.name].migration_fcts["1.0"] == {"test_csv_dn": migrate_csv_path}
  643. assert Config.sections is not None
  644. assert len(Config.sections) == 3
  645. assert Config.sections[DataNodeConfig.name] is not None
  646. assert len(Config.sections[DataNodeConfig.name]) == 3
  647. assert Config.sections[DataNodeConfig.name]["default"] is not None
  648. assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
  649. assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
  650. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
  651. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
  652. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
  653. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
  654. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
  655. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
  656. assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
  657. assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
  658. assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
  659. assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
  660. assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
  661. assert Config.sections[TaskConfig.name] is not None
  662. assert len(Config.sections[TaskConfig.name]) == 2
  663. assert Config.sections[TaskConfig.name]["default"] is not None
  664. assert Config.sections[TaskConfig.name]["default"].inputs == []
  665. assert Config.sections[TaskConfig.name]["default"].outputs == []
  666. assert Config.sections[TaskConfig.name]["default"].function is None
  667. assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
  668. Config.sections[DataNodeConfig.name]["test_csv_dn"].id
  669. ]
  670. assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
  671. Config.sections[DataNodeConfig.name]["test_json_dn"].id
  672. ]
  673. assert Config.sections[TaskConfig.name]["test_task"].function == multiply
  674. assert Config.sections[ScenarioConfig.name] is not None
  675. assert len(Config.sections[ScenarioConfig.name]) == 2
  676. assert Config.sections[ScenarioConfig.name]["default"] is not None
  677. assert Config.sections[ScenarioConfig.name]["default"].tasks == []
  678. assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
  679. assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
  680. assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
  681. assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
  682. Config.sections[TaskConfig.name]["test_task"].id
  683. ]
  684. assert [
  685. additional_data_node.id
  686. for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
  687. ] == []
  688. assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
  689. Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
  690. Config.sections[DataNodeConfig.name]["test_json_dn"].id,
  691. ]
  692. assert Config.sections[ScenarioConfig.name]["test_scenario"].sequences == {
  693. "test_sequence": [Config.sections[TaskConfig.name]["test_task"]]
  694. }
  695. assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
  696. "test_json_dn": [compare_function]
  697. }