test_config_serialization.py
  1. # Copyright 2021-2025 Avaiga Private Limited
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
  4. # the License. You may obtain a copy of the License at
  5. #
  6. # http://www.apache.org/licenses/LICENSE-2.0
  7. #
  8. # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
  9. # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
  10. # specific language governing permissions and limitations under the License.
  11. import datetime
  12. import json
  13. from taipy.common.config import Config
  14. from taipy.common.config._serializer._json_serializer import _JsonSerializer
  15. from taipy.core.common.frequency import Frequency
  16. from taipy.core.common.scope import Scope
  17. from taipy.core.config import CoreSection, DataNodeConfig, JobConfig, ScenarioConfig, TaskConfig
  18. from tests.core.utils.named_temporary_file import NamedTemporaryFile
  19. def multiply(a):
  20. return a * 2
def migrate_csv_path(dn):
    """Config-migration helper used by tests: repoint a data node config's path at "foo.csv"."""
    dn.path = "foo.csv"
  23. def compare_function(*data_node_results):
  24. comparison_result = {}
  25. current_result_index = 0
  26. for current_result in data_node_results:
  27. comparison_result[current_result_index] = {}
  28. next_result_index = 0
  29. for next_result in data_node_results:
  30. comparison_result[current_result_index][next_result_index] = next_result - current_result
  31. next_result_index += 1
  32. current_result_index += 1
  33. return comparison_result
# Minimal user-defined class referenced as the CSV data node's `exposed_type`;
# only its fully qualified name is serialized, so the body is a stub.
class CustomClass:
    a = None  # placeholder attribute
    b = None  # placeholder attribute
  37. class CustomEncoder(json.JSONEncoder):
  38. def default(self, o):
  39. if isinstance(o, datetime):
  40. result = {"__type__": "Datetime", "__value__": o.isoformat()}
  41. else:
  42. result = json.JSONEncoder.default(self, o)
  43. return result
  44. class CustomDecoder(json.JSONDecoder):
  45. def __init__(self, *args, **kwargs):
  46. json.JSONDecoder.__init__(self, *args, **kwargs, object_hook=self.object_hook)
  47. def object_hook(self, source):
  48. if source.get("__type__") == "Datetime":
  49. return datetime.fromisoformat(source.get("__value__"))
  50. else:
  51. return source
  52. def config_test_scenario():
  53. test_csv_dn_cfg = Config.configure_csv_data_node(
  54. id="test_csv_dn",
  55. path="./test.csv",
  56. exposed_type=CustomClass,
  57. scope=Scope.GLOBAL,
  58. validity_period=datetime.timedelta(1),
  59. )
  60. test_json_dn_cfg = Config.configure_json_data_node(
  61. id="test_json_dn",
  62. default_path="./test.json",
  63. encoder=CustomEncoder,
  64. decoder=CustomDecoder,
  65. )
  66. test_pickle_dn_cfg = Config.configure_pickle_data_node(
  67. id="test_pickle_dn",
  68. path="./test.p",
  69. scope=Scope.SCENARIO,
  70. validity_period=datetime.timedelta(1),
  71. )
  72. test_task_cfg = Config.configure_task(
  73. id="test_task", input=test_csv_dn_cfg, function=multiply, output=test_json_dn_cfg
  74. )
  75. test_scenario_cfg = Config.configure_scenario(
  76. id="test_scenario",
  77. task_configs=[test_task_cfg],
  78. additional_data_node_configs=[test_pickle_dn_cfg],
  79. comparators={test_json_dn_cfg.id: compare_function},
  80. frequency=Frequency.DAILY,
  81. )
  82. test_scenario_cfg.add_sequences({"sequence1": [test_task_cfg]})
  83. return test_scenario_cfg
  84. def test_read_write_toml_configuration_file():
  85. expected_toml_config = f"""
  86. [TAIPY]
  87. [JOB]
  88. mode = "development"
  89. [CORE]
  90. root_folder = "./taipy/"
  91. storage_folder = "user_data/"
  92. taipy_storage_folder = ".taipy/"
  93. repository_type = "filesystem"
  94. read_entity_retry = "0:int"
  95. mode = "development"
  96. version_number = ""
  97. force = "False:bool"
  98. core_version = "{CoreSection._CURRENT_CORE_VERSION}"
  99. [DATA_NODE.default]
  100. storage_type = "pickle"
  101. scope = "SCENARIO:SCOPE"
  102. [DATA_NODE.test_csv_dn]
  103. storage_type = "csv"
  104. scope = "GLOBAL:SCOPE"
  105. validity_period = "1d0h0m0s:timedelta"
  106. path = "./test.csv"
  107. exposed_type = "tests.core.config.test_config_serialization.CustomClass:class"
  108. encoding = "utf-8"
  109. has_header = "True:bool"
  110. separator = ","
  111. [DATA_NODE.test_json_dn]
  112. storage_type = "json"
  113. scope = "SCENARIO:SCOPE"
  114. default_path = "./test.json"
  115. encoder = "tests.core.config.test_config_serialization.CustomEncoder:class"
  116. decoder = "tests.core.config.test_config_serialization.CustomDecoder:class"
  117. encoding = "utf-8"
  118. [DATA_NODE.test_pickle_dn]
  119. storage_type = "pickle"
  120. scope = "SCENARIO:SCOPE"
  121. validity_period = "1d0h0m0s:timedelta"
  122. path = "./test.p"
  123. [TASK.default]
  124. inputs = []
  125. outputs = []
  126. skippable = "False:bool"
  127. [TASK.test_task]
  128. function = "tests.core.config.test_config_serialization.multiply:function"
  129. inputs = [ "test_csv_dn:SECTION",]
  130. outputs = [ "test_json_dn:SECTION",]
  131. skippable = "False:bool"
  132. [SCENARIO.default]
  133. tasks = []
  134. additional_data_nodes = []
  135. [SCENARIO.test_scenario]
  136. tasks = [ "test_task:SECTION",]
  137. additional_data_nodes = [ "test_pickle_dn:SECTION",]
  138. frequency = "DAILY:FREQUENCY"
  139. [SCENARIO.default.comparators]
  140. [SCENARIO.default.sequences]
  141. [SCENARIO.test_scenario.comparators]
  142. test_json_dn = [ "tests.core.config.test_config_serialization.compare_function:function",]
  143. [SCENARIO.test_scenario.sequences]
  144. sequence1 = [ "test_task:SECTION",]
  145. """.strip()
  146. config_test_scenario()
  147. tf = NamedTemporaryFile()
  148. Config.backup(tf.filename)
  149. actual_config = tf.read().strip()
  150. assert actual_config == expected_toml_config
  151. Config.load(tf.filename)
  152. tf2 = NamedTemporaryFile()
  153. Config.backup(tf2.filename)
  154. actual_config_2 = tf2.read().strip()
  155. assert actual_config_2 == expected_toml_config
  156. assert Config.unique_sections is not None
  157. assert len(Config.unique_sections) == 2
  158. assert Config.unique_sections[JobConfig.name].mode == "development"
  159. assert Config.unique_sections[JobConfig.name].max_nb_of_workers is None
  160. assert Config.sections is not None
  161. assert len(Config.sections) == 3
  162. assert Config.sections[DataNodeConfig.name] is not None
  163. assert len(Config.sections[DataNodeConfig.name]) == 4
  164. assert Config.sections[DataNodeConfig.name]["default"] is not None
  165. assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
  166. assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
  167. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
  168. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
  169. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
  170. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
  171. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
  172. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].encoding == "utf-8"
  173. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
  174. assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
  175. assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
  176. assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
  177. assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoding == "utf-8"
  178. assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
  179. assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
  180. assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].storage_type == "pickle"
  181. assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].scope == Scope.SCENARIO
  182. assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].validity_period == datetime.timedelta(1)
  183. assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].path == "./test.p"
  184. assert Config.sections[TaskConfig.name] is not None
  185. assert len(Config.sections[TaskConfig.name]) == 2
  186. assert Config.sections[TaskConfig.name]["default"] is not None
  187. assert Config.sections[TaskConfig.name]["default"].inputs == []
  188. assert Config.sections[TaskConfig.name]["default"].outputs == []
  189. assert Config.sections[TaskConfig.name]["default"].function is None
  190. assert not Config.sections[TaskConfig.name]["default"].skippable
  191. assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
  192. Config.sections[DataNodeConfig.name]["test_csv_dn"].id
  193. ]
  194. assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
  195. Config.sections[DataNodeConfig.name]["test_json_dn"].id
  196. ]
  197. assert Config.sections[TaskConfig.name]["test_task"].function == multiply
  198. assert Config.sections[TaskConfig.name]["test_task"].function == multiply
  199. assert Config.sections[ScenarioConfig.name] is not None
  200. assert len(Config.sections[ScenarioConfig.name]) == 2
  201. assert Config.sections[ScenarioConfig.name]["default"] is not None
  202. assert Config.sections[ScenarioConfig.name]["default"].tasks == []
  203. assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
  204. assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
  205. assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
  206. assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
  207. Config.sections[TaskConfig.name]["test_task"].id
  208. ]
  209. assert [
  210. additional_data_node.id
  211. for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
  212. ] == [Config.sections[DataNodeConfig.name]["test_pickle_dn"].id]
  213. assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
  214. Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
  215. Config.sections[DataNodeConfig.name]["test_json_dn"].id,
  216. Config.sections[DataNodeConfig.name]["test_pickle_dn"].id,
  217. ]
  218. sequences = {}
  219. for sequence_name, sequence_tasks in Config.sections[ScenarioConfig.name]["test_scenario"].sequences.items():
  220. sequences[sequence_name] = [task.id for task in sequence_tasks]
  221. assert sequences == {"sequence1": [Config.sections[TaskConfig.name]["test_task"].id]}
  222. assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
  223. "test_json_dn": [compare_function]
  224. }
def test_read_write_json_configuration_file():
    """Round-trip the scenario configuration through the JSON serializer.

    Same flow as the TOML test, but with ``Config._serializer`` swapped for
    ``_JsonSerializer``: serialize, compare to the snapshot, reload, serialize
    again, then verify every deserialized attribute.
    """
    # The snapshot is concatenated so the core_version line can be an f-string;
    # note .strip() binds only to the LAST literal, trimming its edges before
    # concatenation.
    expected_json_config = (
        """{
"TAIPY": {},
"JOB": {
"mode": "development"
},
"CORE": {
"root_folder": "./taipy/",
"storage_folder": "user_data/",
"taipy_storage_folder": ".taipy/",
"repository_type": "filesystem",
"read_entity_retry": "0:int",
"mode": "development",
"version_number": "",
"force": "False:bool","""
        + f"""
"core_version": "{CoreSection._CURRENT_CORE_VERSION}"
"""
        + """
},
"DATA_NODE": {
"default": {
"storage_type": "pickle",
"scope": "SCENARIO:SCOPE"
},
"test_csv_dn": {
"storage_type": "csv",
"scope": "GLOBAL:SCOPE",
"validity_period": "1d0h0m0s:timedelta",
"path": "./test.csv",
"exposed_type": "tests.core.config.test_config_serialization.CustomClass:class",
"encoding": "utf-8",
"has_header": "True:bool",
"separator": ","
},
"test_json_dn": {
"storage_type": "json",
"scope": "SCENARIO:SCOPE",
"default_path": "./test.json",
"encoder": "tests.core.config.test_config_serialization.CustomEncoder:class",
"decoder": "tests.core.config.test_config_serialization.CustomDecoder:class",
"encoding": "utf-8"
},
"test_pickle_dn": {
"storage_type": "pickle",
"scope": "SCENARIO:SCOPE",
"validity_period": "1d0h0m0s:timedelta",
"path": "./test.p"
}
},
"TASK": {
"default": {
"function": null,
"inputs": [],
"outputs": [],
"skippable": "False:bool"
},
"test_task": {
"function": "tests.core.config.test_config_serialization.multiply:function",
"inputs": [
"test_csv_dn:SECTION"
],
"outputs": [
"test_json_dn:SECTION"
],
"skippable": "False:bool"
},
},
"SCENARIO": {
"default": {
"comparators": {},
"tasks": [],
"additional_data_nodes": [],
"frequency": null,
"sequences": {}
},
"test_scenario": {
"comparators": {
"test_json_dn": [
"tests.core.config.test_config_serialization.compare_function:function"
]
},
"tasks": [
"test_task:SECTION"
],
"additional_data_nodes": [
"test_pickle_dn:SECTION"
],
"frequency": "DAILY:FREQUENCY",
"sequences": {
"sequence1": [
"test_task:SECTION"
]
}
}
}
}
""".strip()
    )
    # Switch the global serializer to JSON for this test.
    Config._serializer = _JsonSerializer()
    config_test_scenario()

    # Serialize and compare against the snapshot.
    tf = NamedTemporaryFile()
    Config.backup(tf.filename)
    actual_config = tf.read().strip()
    assert actual_config == expected_json_config

    # Reload the backup and serialize again: the output must be unchanged.
    Config.load(tf.filename)
    tf2 = NamedTemporaryFile()
    Config.backup(tf2.filename)
    actual_config_2 = tf2.read().strip()
    assert actual_config_2 == expected_json_config

    # Unique sections: JOB and CORE.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 2
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers is None

    # Data node sections.
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 4
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].path == "./test.p"

    # Task sections.
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply

    # Scenario sections.
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == [Config.sections[DataNodeConfig.name]["test_pickle_dn"].id]
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
        Config.sections[DataNodeConfig.name]["test_pickle_dn"].id,
    ]
    sequences = {}
    for sequence_name, sequence_tasks in Config.sections[ScenarioConfig.name]["test_scenario"].sequences.items():
        sequences[sequence_name] = [task.id for task in sequence_tasks]
    assert sequences == {"sequence1": [Config.sections[TaskConfig.name]["test_task"].id]}
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
  403. def test_read_write_toml_configuration_file_migrate_sequence_in_scenario():
  404. old_toml_config = """
  405. [TAIPY]
  406. [JOB]
  407. mode = "development"
  408. max_nb_of_workers = "1:int"
  409. [CORE]
  410. root_folder = "./taipy/"
  411. storage_folder = ".data/"
  412. repository_type = "filesystem"
  413. mode = "development"
  414. version_number = ""
  415. force = "False:bool"
  416. [DATA_NODE.default]
  417. storage_type = "pickle"
  418. scope = "SCENARIO:SCOPE"
  419. [DATA_NODE.test_csv_dn]
  420. storage_type = "csv"
  421. scope = "GLOBAL:SCOPE"
  422. validity_period = "1d0h0m0s:timedelta"
  423. path = "./test.csv"
  424. exposed_type = "tests.core.config.test_config_serialization.CustomClass:class"
  425. has_header = "True:bool"
  426. [DATA_NODE.test_json_dn]
  427. storage_type = "json"
  428. scope = "SCENARIO:SCOPE"
  429. default_path = "./test.json"
  430. encoder = "tests.core.config.test_config_serialization.CustomEncoder:class"
  431. decoder = "tests.core.config.test_config_serialization.CustomDecoder:class"
  432. [TASK.default]
  433. inputs = []
  434. outputs = []
  435. skippable = "False:bool"
  436. [TASK.test_task]
  437. function = "tests.core.config.test_config_serialization.multiply:function"
  438. inputs = [ "test_csv_dn:SECTION",]
  439. outputs = [ "test_json_dn:SECTION",]
  440. skippable = "False:bool"
  441. [SCENARIO.default]
  442. [SCENARIO.test_scenario]
  443. tasks = [ "test_task:SECTION",]
  444. sequences.test_sequence = [ "test_task:SECTION",]
  445. frequency = "DAILY:FREQUENCY"
  446. [SCENARIO.default.comparators]
  447. [SCENARIO.test_scenario.comparators]
  448. test_json_dn = [ "tests.core.config.test_config_serialization.compare_function:function",]
  449. """.strip()
  450. config_test_scenario()
  451. tf = NamedTemporaryFile()
  452. with open(tf.filename, "w") as fd:
  453. fd.writelines(old_toml_config)
  454. Config.restore(tf.filename)
  455. assert Config.unique_sections is not None
  456. assert len(Config.unique_sections) == 2
  457. assert Config.unique_sections[CoreSection.name].root_folder == "./taipy/"
  458. assert Config.unique_sections[CoreSection.name].storage_folder == ".data/"
  459. assert Config.unique_sections[CoreSection.name].repository_type == "filesystem"
  460. assert Config.unique_sections[CoreSection.name].repository_properties == {}
  461. assert Config.unique_sections[CoreSection.name].mode == "development"
  462. assert Config.unique_sections[CoreSection.name].version_number == ""
  463. assert Config.unique_sections[CoreSection.name].force is False
  464. assert Config.unique_sections[JobConfig.name].mode == "development"
  465. assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
  466. assert Config.sections is not None
  467. assert len(Config.sections) == 3
  468. assert Config.sections[DataNodeConfig.name] is not None
  469. assert len(Config.sections[DataNodeConfig.name]) == 3
  470. assert Config.sections[DataNodeConfig.name]["default"] is not None
  471. assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
  472. assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
  473. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
  474. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
  475. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
  476. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
  477. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
  478. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
  479. assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
  480. assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
  481. assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
  482. assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
  483. assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
  484. assert Config.sections[TaskConfig.name] is not None
  485. assert len(Config.sections[TaskConfig.name]) == 2
  486. assert Config.sections[TaskConfig.name]["default"] is not None
  487. assert Config.sections[TaskConfig.name]["default"].inputs == []
  488. assert Config.sections[TaskConfig.name]["default"].outputs == []
  489. assert Config.sections[TaskConfig.name]["default"].function is None
  490. assert not Config.sections[TaskConfig.name]["default"].skippable
  491. assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
  492. Config.sections[DataNodeConfig.name]["test_csv_dn"].id
  493. ]
  494. assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
  495. Config.sections[DataNodeConfig.name]["test_json_dn"].id
  496. ]
  497. assert Config.sections[TaskConfig.name]["test_task"].function == multiply
  498. assert Config.sections[TaskConfig.name]["test_task"].function == multiply
  499. assert Config.sections[ScenarioConfig.name] is not None
  500. assert len(Config.sections[ScenarioConfig.name]) == 2
  501. assert Config.sections[ScenarioConfig.name]["default"] is not None
  502. assert Config.sections[ScenarioConfig.name]["default"].tasks == []
  503. assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
  504. assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
  505. assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
  506. assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
  507. Config.sections[TaskConfig.name]["test_task"].id
  508. ]
  509. assert [
  510. additional_data_node.id
  511. for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
  512. ] == []
  513. assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
  514. Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
  515. Config.sections[DataNodeConfig.name]["test_json_dn"].id,
  516. ]
  517. assert Config.sections[ScenarioConfig.name]["test_scenario"].sequences == {
  518. "test_sequence": [Config.sections[TaskConfig.name]["test_task"]]
  519. }
  520. assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
  521. "test_json_dn": [compare_function]
  522. }
def test_read_write_json_configuration_file_migrate_sequence_in_scenario():
    """Restore a legacy JSON config and check the sequence migration.

    Writes an old-format JSON file, restores it via ``Config.restore`` with the
    JSON serializer active, and verifies all deserialized sections, including
    the ``test_sequence`` entry.
    """
    old_json_config = """
{
"TAIPY": {},
"JOB": {
"mode": "development",
"max_nb_of_workers": "1:int"
},
"CORE": {
"root_folder": "./taipy/",
"storage_folder": ".data/",
"repository_type": "filesystem",
"read_entity_retry": "0:int",
"mode": "development",
"version_number": "",
"force": "False:bool"
},
"DATA_NODE": {
"default": {
"storage_type": "pickle",
"scope": "SCENARIO:SCOPE"
},
"test_csv_dn": {
"storage_type": "csv",
"scope": "GLOBAL:SCOPE",
"validity_period": "1d0h0m0s:timedelta",
"path": "./test.csv",
"exposed_type": "tests.core.config.test_config_serialization.CustomClass:class",
"has_header": "True:bool"
},
"test_json_dn": {
"storage_type": "json",
"scope": "SCENARIO:SCOPE",
"default_path": "./test.json",
"encoder": "tests.core.config.test_config_serialization.CustomEncoder:class",
"decoder": "tests.core.config.test_config_serialization.CustomDecoder:class"
}
},
"TASK": {
"default": {
"function": null,
"inputs": [],
"outputs": [],
"skippable": "False:bool"
},
"test_task": {
"function": "tests.core.config.test_config_serialization.multiply:function",
"inputs": [
"test_csv_dn:SECTION"
],
"outputs": [
"test_json_dn:SECTION"
],
"skippable": "False:bool"
}
},
"SCENARIO": {
"default": {
"comparators": {},
"sequences": {},
"frequency": null
},
"test_scenario": {
"comparators": {
"test_json_dn": [
"tests.core.config.test_config_serialization.compare_function:function"
]
},
"tasks": [
"test_task:SECTION"
],
"sequences": {
"test_sequence": [
"test_task:SECTION"
]
},
"frequency": "DAILY:FREQUENCY"
}
}
}
""".strip()
    # Switch the global serializer to JSON, then restore from the legacy file.
    Config._serializer = _JsonSerializer()
    config_test_scenario()
    tf = NamedTemporaryFile()
    with open(tf.filename, "w") as fd:
        fd.writelines(old_json_config)
    Config.restore(tf.filename)

    # Unique sections: CORE and JOB.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 2
    assert Config.unique_sections[CoreSection.name].root_folder == "./taipy/"
    assert Config.unique_sections[CoreSection.name].storage_folder == ".data/"
    assert Config.unique_sections[CoreSection.name].repository_type == "filesystem"
    assert Config.unique_sections[CoreSection.name].repository_properties == {}
    assert Config.unique_sections[CoreSection.name].mode == "development"
    assert Config.unique_sections[CoreSection.name].version_number == ""
    assert Config.unique_sections[CoreSection.name].force is False
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1

    # Data node sections (no pickle data node in the legacy file).
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 3
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder

    # Task sections.
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply

    # Scenario sections: the legacy sequence must be migrated to `sequences`.
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == []
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
    ]
    assert Config.sections[ScenarioConfig.name]["test_scenario"].sequences == {
        "test_sequence": [Config.sections[TaskConfig.name]["test_task"]]
    }
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }