test_config_serialization.py 29 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770
  1. # Copyright 2021-2024 Avaiga Private Limited
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
  4. # the License. You may obtain a copy of the License at
  5. #
  6. # http://www.apache.org/licenses/LICENSE-2.0
  7. #
  8. # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
  9. # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
  10. # specific language governing permissions and limitations under the License.
  11. import datetime
  12. import json
  13. from taipy.common.config import Config
  14. from taipy.common.config._serializer._json_serializer import _JsonSerializer
  15. from taipy.common.config.common.frequency import Frequency
  16. from taipy.common.config.common.scope import Scope
  17. from taipy.core.config import CoreSection, DataNodeConfig, JobConfig, ScenarioConfig, TaskConfig
  18. from tests.core.utils.named_temporary_file import NamedTemporaryFile
  19. def multiply(a):
  20. return a * 2
  21. def migrate_csv_path(dn):
  22. dn.path = "foo.csv"
  23. def compare_function(*data_node_results):
  24. comparison_result = {}
  25. current_result_index = 0
  26. for current_result in data_node_results:
  27. comparison_result[current_result_index] = {}
  28. next_result_index = 0
  29. for next_result in data_node_results:
  30. comparison_result[current_result_index][next_result_index] = next_result - current_result
  31. next_result_index += 1
  32. current_result_index += 1
  33. return comparison_result
class CustomClass:
    """Minimal custom type used as the CSV data node's ``exposed_type``.

    Only its dotted path is serialized/deserialized by the tests; the
    attributes below are placeholders and are never populated here.
    """

    a = None
    b = None
  37. class CustomEncoder(json.JSONEncoder):
  38. def default(self, o):
  39. if isinstance(o, datetime):
  40. result = {"__type__": "Datetime", "__value__": o.isoformat()}
  41. else:
  42. result = json.JSONEncoder.default(self, o)
  43. return result
  44. class CustomDecoder(json.JSONDecoder):
  45. def __init__(self, *args, **kwargs):
  46. json.JSONDecoder.__init__(self, *args, **kwargs, object_hook=self.object_hook)
  47. def object_hook(self, source):
  48. if source.get("__type__") == "Datetime":
  49. return datetime.fromisoformat(source.get("__value__"))
  50. else:
  51. return source
  52. def config_test_scenario():
  53. test_csv_dn_cfg = Config.configure_csv_data_node(
  54. id="test_csv_dn",
  55. path="./test.csv",
  56. exposed_type=CustomClass,
  57. scope=Scope.GLOBAL,
  58. validity_period=datetime.timedelta(1),
  59. )
  60. test_json_dn_cfg = Config.configure_json_data_node(
  61. id="test_json_dn",
  62. default_path="./test.json",
  63. encoder=CustomEncoder,
  64. decoder=CustomDecoder,
  65. )
  66. test_pickle_dn_cfg = Config.configure_pickle_data_node(
  67. id="test_pickle_dn",
  68. path="./test.p",
  69. scope=Scope.SCENARIO,
  70. validity_period=datetime.timedelta(1),
  71. )
  72. test_task_cfg = Config.configure_task(
  73. id="test_task", input=test_csv_dn_cfg, function=multiply, output=test_json_dn_cfg
  74. )
  75. test_scenario_cfg = Config.configure_scenario(
  76. id="test_scenario",
  77. task_configs=[test_task_cfg],
  78. additional_data_node_configs=[test_pickle_dn_cfg],
  79. comparators={test_json_dn_cfg.id: compare_function},
  80. frequency=Frequency.DAILY,
  81. )
  82. test_scenario_cfg.add_sequences({"sequence1": [test_task_cfg]})
  83. return test_scenario_cfg
def test_read_write_toml_configuration_file():
    """Serialize the config to TOML, reload it, and verify every field survives.

    The expected TOML is compared byte-for-byte against ``Config.backup``
    output, then the file is loaded back and backed up again to prove the
    round trip is lossless; finally each section attribute is checked.
    """
    # f-string: only {CoreSection._CURRENT_CORE_VERSION} is interpolated.
    expected_toml_config = f"""
[TAIPY]
[JOB]
mode = "development"
[CORE]
root_folder = "./taipy/"
storage_folder = "user_data/"
taipy_storage_folder = ".taipy/"
repository_type = "filesystem"
read_entity_retry = "0:int"
mode = "development"
version_number = ""
force = "False:bool"
core_version = "{CoreSection._CURRENT_CORE_VERSION}"
[DATA_NODE.default]
storage_type = "pickle"
scope = "SCENARIO:SCOPE"
[DATA_NODE.test_csv_dn]
storage_type = "csv"
scope = "GLOBAL:SCOPE"
validity_period = "1d0h0m0s:timedelta"
path = "./test.csv"
exposed_type = "tests.core.config.test_config_serialization.CustomClass:class"
encoding = "utf-8"
has_header = "True:bool"
[DATA_NODE.test_json_dn]
storage_type = "json"
scope = "SCENARIO:SCOPE"
default_path = "./test.json"
encoder = "tests.core.config.test_config_serialization.CustomEncoder:class"
decoder = "tests.core.config.test_config_serialization.CustomDecoder:class"
encoding = "utf-8"
[DATA_NODE.test_pickle_dn]
storage_type = "pickle"
scope = "SCENARIO:SCOPE"
validity_period = "1d0h0m0s:timedelta"
path = "./test.p"
[TASK.default]
inputs = []
outputs = []
skippable = "False:bool"
[TASK.test_task]
function = "tests.core.config.test_config_serialization.multiply:function"
inputs = [ "test_csv_dn:SECTION",]
outputs = [ "test_json_dn:SECTION",]
skippable = "False:bool"
[SCENARIO.default]
tasks = []
additional_data_nodes = []
[SCENARIO.test_scenario]
tasks = [ "test_task:SECTION",]
additional_data_nodes = [ "test_pickle_dn:SECTION",]
frequency = "DAILY:FREQUENCY"
[SCENARIO.default.comparators]
[SCENARIO.default.sequences]
[SCENARIO.test_scenario.comparators]
test_json_dn = [ "tests.core.config.test_config_serialization.compare_function:function",]
[SCENARIO.test_scenario.sequences]
sequence1 = [ "test_task:SECTION",]
""".strip()
    config_test_scenario()
    # Serialize (backup) and compare with the expected TOML.
    tf = NamedTemporaryFile()
    Config.backup(tf.filename)
    actual_config = tf.read().strip()
    assert actual_config == expected_toml_config
    # Reload the file and serialize again: the round trip must be stable.
    Config.load(tf.filename)
    tf2 = NamedTemporaryFile()
    Config.backup(tf2.filename)
    actual_config_2 = tf2.read().strip()
    assert actual_config_2 == expected_toml_config
    # Unique sections: JOB and CORE.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 2
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers is None
    # Non-unique sections: DATA_NODE, TASK, SCENARIO.
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 4
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].path == "./test.p"
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert not Config.sections[TaskConfig.name]["default"].skippable
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    # NOTE(review): duplicated assertion kept from the original.
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == [Config.sections[DataNodeConfig.name]["test_pickle_dn"].id]
    # data_nodes is the union of task inputs/outputs and additional data nodes.
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
        Config.sections[DataNodeConfig.name]["test_pickle_dn"].id,
    ]
    sequences = {}
    for sequence_name, sequence_tasks in Config.sections[ScenarioConfig.name]["test_scenario"].sequences.items():
        sequences[sequence_name] = [task.id for task in sequence_tasks]
    assert sequences == {"sequence1": [Config.sections[TaskConfig.name]["test_task"].id]}
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
def test_read_write_json_configuration_file():
    """Same round-trip check as the TOML test, but through ``_JsonSerializer``.

    The expected JSON is three concatenated literals so that only the
    ``core_version`` value is interpolated from ``CoreSection``.
    """
    expected_json_config = (
        """{
"TAIPY": {},
"JOB": {
"mode": "development"
},
"CORE": {
"root_folder": "./taipy/",
"storage_folder": "user_data/",
"taipy_storage_folder": ".taipy/",
"repository_type": "filesystem",
"read_entity_retry": "0:int",
"mode": "development",
"version_number": "",
"force": "False:bool","""
        + f"""
"core_version": "{CoreSection._CURRENT_CORE_VERSION}"
"""
        + """
},
"DATA_NODE": {
"default": {
"storage_type": "pickle",
"scope": "SCENARIO:SCOPE"
},
"test_csv_dn": {
"storage_type": "csv",
"scope": "GLOBAL:SCOPE",
"validity_period": "1d0h0m0s:timedelta",
"path": "./test.csv",
"exposed_type": "tests.core.config.test_config_serialization.CustomClass:class",
"encoding": "utf-8",
"has_header": "True:bool"
},
"test_json_dn": {
"storage_type": "json",
"scope": "SCENARIO:SCOPE",
"default_path": "./test.json",
"encoder": "tests.core.config.test_config_serialization.CustomEncoder:class",
"decoder": "tests.core.config.test_config_serialization.CustomDecoder:class",
"encoding": "utf-8"
},
"test_pickle_dn": {
"storage_type": "pickle",
"scope": "SCENARIO:SCOPE",
"validity_period": "1d0h0m0s:timedelta",
"path": "./test.p"
}
},
"TASK": {
"default": {
"function": null,
"inputs": [],
"outputs": [],
"skippable": "False:bool"
},
"test_task": {
"function": "tests.core.config.test_config_serialization.multiply:function",
"inputs": [
"test_csv_dn:SECTION"
],
"outputs": [
"test_json_dn:SECTION"
],
"skippable": "False:bool"
}
},
"SCENARIO": {
"default": {
"comparators": {},
"tasks": [],
"additional_data_nodes": [],
"frequency": null,
"sequences": {}
},
"test_scenario": {
"comparators": {
"test_json_dn": [
"tests.core.config.test_config_serialization.compare_function:function"
]
},
"tasks": [
"test_task:SECTION"
],
"additional_data_nodes": [
"test_pickle_dn:SECTION"
],
"frequency": "DAILY:FREQUENCY",
"sequences": {
"sequence1": [
"test_task:SECTION"
]
}
}
}
}
""".strip()
    )
    # Swap in the JSON serializer before building/serializing the config.
    Config._serializer = _JsonSerializer()
    config_test_scenario()
    tf = NamedTemporaryFile()
    Config.backup(tf.filename)
    actual_config = tf.read().strip()
    assert actual_config == expected_json_config
    # Reload and re-serialize: the round trip must be stable.
    Config.load(tf.filename)
    tf2 = NamedTemporaryFile()
    Config.backup(tf2.filename)
    actual_config_2 = tf2.read().strip()
    assert actual_config_2 == expected_json_config
    # Unique sections: JOB and CORE.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 2
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers is None
    # Non-unique sections: DATA_NODE, TASK, SCENARIO.
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 4
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].path == "./test.p"
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == [Config.sections[DataNodeConfig.name]["test_pickle_dn"].id]
    # data_nodes is the union of task inputs/outputs and additional data nodes.
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
        Config.sections[DataNodeConfig.name]["test_pickle_dn"].id,
    ]
    sequences = {}
    for sequence_name, sequence_tasks in Config.sections[ScenarioConfig.name]["test_scenario"].sequences.items():
        sequences[sequence_name] = [task.id for task in sequence_tasks]
    assert sequences == {"sequence1": [Config.sections[TaskConfig.name]["test_task"].id]}
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
def test_read_write_toml_configuration_file_migrate_sequence_in_scenario():
    """Restore an old-format TOML config and verify it is migrated correctly.

    The legacy file declares ``sequences.test_sequence`` inline under
    ``[SCENARIO.test_scenario]`` and has no ``additional_data_nodes``;
    ``Config.restore`` must still load it, and the sequence must end up in
    the scenario config's ``sequences`` mapping.
    """
    old_toml_config = """
[TAIPY]
[JOB]
mode = "development"
max_nb_of_workers = "1:int"
[CORE]
root_folder = "./taipy/"
storage_folder = ".data/"
repository_type = "filesystem"
mode = "development"
version_number = ""
force = "False:bool"
[DATA_NODE.default]
storage_type = "pickle"
scope = "SCENARIO:SCOPE"
[DATA_NODE.test_csv_dn]
storage_type = "csv"
scope = "GLOBAL:SCOPE"
validity_period = "1d0h0m0s:timedelta"
path = "./test.csv"
exposed_type = "tests.core.config.test_config_serialization.CustomClass:class"
has_header = "True:bool"
[DATA_NODE.test_json_dn]
storage_type = "json"
scope = "SCENARIO:SCOPE"
default_path = "./test.json"
encoder = "tests.core.config.test_config_serialization.CustomEncoder:class"
decoder = "tests.core.config.test_config_serialization.CustomDecoder:class"
[TASK.default]
inputs = []
outputs = []
skippable = "False:bool"
[TASK.test_task]
function = "tests.core.config.test_config_serialization.multiply:function"
inputs = [ "test_csv_dn:SECTION",]
outputs = [ "test_json_dn:SECTION",]
skippable = "False:bool"
[SCENARIO.default]
[SCENARIO.test_scenario]
tasks = [ "test_task:SECTION",]
sequences.test_sequence = [ "test_task:SECTION",]
frequency = "DAILY:FREQUENCY"
[SCENARIO.default.comparators]
[SCENARIO.test_scenario.comparators]
test_json_dn = [ "tests.core.config.test_config_serialization.compare_function:function",]
""".strip()
    config_test_scenario()
    # Write the legacy file, then replace the in-memory config with it.
    tf = NamedTemporaryFile()
    with open(tf.filename, "w") as fd:
        fd.writelines(old_toml_config)
    Config.restore(tf.filename)
    # Unique sections: CORE and JOB.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 2
    assert Config.unique_sections[CoreSection.name].root_folder == "./taipy/"
    assert Config.unique_sections[CoreSection.name].storage_folder == ".data/"
    assert Config.unique_sections[CoreSection.name].repository_type == "filesystem"
    assert Config.unique_sections[CoreSection.name].repository_properties == {}
    assert Config.unique_sections[CoreSection.name].mode == "development"
    assert Config.unique_sections[CoreSection.name].version_number == ""
    assert Config.unique_sections[CoreSection.name].force is False
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
    # Non-unique sections: only 3 data nodes here (no pickle node in the old file).
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 3
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert not Config.sections[TaskConfig.name]["default"].skippable
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    # NOTE(review): duplicated assertion kept from the original.
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    # No additional data nodes in the legacy file.
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == []
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
    ]
    # The inline "sequences.test_sequence" entry must have been migrated.
    assert Config.sections[ScenarioConfig.name]["test_scenario"].sequences == {
        "test_sequence": [Config.sections[TaskConfig.name]["test_task"]]
    }
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
def test_read_write_json_configuration_file_migrate_sequence_in_scenario():
    """Restore an old-format JSON config and verify it is migrated correctly.

    JSON counterpart of the TOML migration test: the legacy file declares a
    ``sequences`` mapping inside ``test_scenario`` and no
    ``additional_data_nodes``; after ``Config.restore`` the sequence must be
    available on the scenario config.
    """
    old_json_config = """
{
"TAIPY": {},
"JOB": {
"mode": "development",
"max_nb_of_workers": "1:int"
},
"CORE": {
"root_folder": "./taipy/",
"storage_folder": ".data/",
"repository_type": "filesystem",
"read_entity_retry": "0:int",
"mode": "development",
"version_number": "",
"force": "False:bool"
},
"DATA_NODE": {
"default": {
"storage_type": "pickle",
"scope": "SCENARIO:SCOPE"
},
"test_csv_dn": {
"storage_type": "csv",
"scope": "GLOBAL:SCOPE",
"validity_period": "1d0h0m0s:timedelta",
"path": "./test.csv",
"exposed_type": "tests.core.config.test_config_serialization.CustomClass:class",
"has_header": "True:bool"
},
"test_json_dn": {
"storage_type": "json",
"scope": "SCENARIO:SCOPE",
"default_path": "./test.json",
"encoder": "tests.core.config.test_config_serialization.CustomEncoder:class",
"decoder": "tests.core.config.test_config_serialization.CustomDecoder:class"
}
},
"TASK": {
"default": {
"function": null,
"inputs": [],
"outputs": [],
"skippable": "False:bool"
},
"test_task": {
"function": "tests.core.config.test_config_serialization.multiply:function",
"inputs": [
"test_csv_dn:SECTION"
],
"outputs": [
"test_json_dn:SECTION"
],
"skippable": "False:bool"
}
},
"SCENARIO": {
"default": {
"comparators": {},
"sequences": {},
"frequency": null
},
"test_scenario": {
"comparators": {
"test_json_dn": [
"tests.core.config.test_config_serialization.compare_function:function"
]
},
"tasks": [
"test_task:SECTION"
],
"sequences": {
"test_sequence": [
"test_task:SECTION"
]
},
"frequency": "DAILY:FREQUENCY"
}
}
}
""".strip()
    # Swap in the JSON serializer, write the legacy file, then restore it.
    Config._serializer = _JsonSerializer()
    config_test_scenario()
    tf = NamedTemporaryFile()
    with open(tf.filename, "w") as fd:
        fd.writelines(old_json_config)
    Config.restore(tf.filename)
    # Unique sections: CORE and JOB.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 2
    assert Config.unique_sections[CoreSection.name].root_folder == "./taipy/"
    assert Config.unique_sections[CoreSection.name].storage_folder == ".data/"
    assert Config.unique_sections[CoreSection.name].repository_type == "filesystem"
    assert Config.unique_sections[CoreSection.name].repository_properties == {}
    assert Config.unique_sections[CoreSection.name].mode == "development"
    assert Config.unique_sections[CoreSection.name].version_number == ""
    assert Config.unique_sections[CoreSection.name].force is False
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
    # Non-unique sections: only 3 data nodes here (no pickle node in the old file).
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 3
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    # No additional data nodes in the legacy file.
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == []
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
    ]
    # The legacy "sequences" mapping must have been migrated.
    assert Config.sections[ScenarioConfig.name]["test_scenario"].sequences == {
        "test_sequence": [Config.sections[TaskConfig.name]["test_task"]]
    }
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }