# tests/core/config/test_config_serialization.py (~30 KB)
  1. # Copyright 2023 Avaiga Private Limited
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
  4. # the License. You may obtain a copy of the License at
  5. #
  6. # http://www.apache.org/licenses/LICENSE-2.0
  7. #
  8. # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
  9. # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
  10. # specific language governing permissions and limitations under the License.
  11. import datetime
  12. import json
  13. from taipy.config import Config
  14. from taipy.config._serializer._json_serializer import _JsonSerializer
  15. from taipy.config.common.frequency import Frequency
  16. from taipy.config.common.scope import Scope
  17. from taipy.core.config import CoreSection, DataNodeConfig, JobConfig, MigrationConfig, ScenarioConfig, TaskConfig
  18. from tests.core.utils.named_temporary_file import NamedTemporaryFile
  19. def multiply(a):
  20. return a * 2
  21. def migrate_csv_path(dn):
  22. dn.path = "foo.csv"
  23. def compare_function(*data_node_results):
  24. comparison_result = {}
  25. current_result_index = 0
  26. for current_result in data_node_results:
  27. comparison_result[current_result_index] = {}
  28. next_result_index = 0
  29. for next_result in data_node_results:
  30. comparison_result[current_result_index][next_result_index] = next_result - current_result
  31. next_result_index += 1
  32. current_result_index += 1
  33. return comparison_result
  34. class CustomClass:
  35. a = None
  36. b = None
  37. class CustomEncoder(json.JSONEncoder):
  38. def default(self, o):
  39. if isinstance(o, datetime):
  40. result = {"__type__": "Datetime", "__value__": o.isoformat()}
  41. else:
  42. result = json.JSONEncoder.default(self, o)
  43. return result
  44. class CustomDecoder(json.JSONDecoder):
  45. def __init__(self, *args, **kwargs):
  46. json.JSONDecoder.__init__(self, *args, **kwargs, object_hook=self.object_hook)
  47. def object_hook(self, source):
  48. if source.get("__type__") == "Datetime":
  49. return datetime.fromisoformat(source.get("__value__"))
  50. else:
  51. return source
def config_test_scenario():
    """Build the shared test configuration on the global ``Config``.

    Registers three data node configs (CSV, JSON, pickle), one task, one
    scenario (with a sequence and a comparator) and a migration function,
    then returns the scenario config. Everything else is a side effect on
    the global ``Config`` singleton.
    """
    # CSV data node: custom exposed type, global scope, 1-day validity.
    test_csv_dn_cfg = Config.configure_csv_data_node(
        id="test_csv_dn",
        path="./test.csv",
        exposed_type=CustomClass,
        scope=Scope.GLOBAL,
        validity_period=datetime.timedelta(1),
    )
    # JSON data node wired to the custom encoder/decoder defined above.
    test_json_dn_cfg = Config.configure_json_data_node(
        id="test_json_dn",
        default_path="./test.json",
        encoder=CustomEncoder,
        decoder=CustomDecoder,
    )
    # Pickle data node used only as an additional (non-task) data node.
    test_pickle_dn_cfg = Config.configure_pickle_data_node(
        id="test_pickle_dn",
        path="./test.p",
        scope=Scope.SCENARIO,
        validity_period=datetime.timedelta(1),
    )
    # Single task: CSV in, JSON out, executing ``multiply``.
    test_task_cfg = Config.configure_task(
        id="test_task", input=test_csv_dn_cfg, function=multiply, output=test_json_dn_cfg
    )
    # Daily scenario bundling the task, the extra pickle node and a comparator.
    test_scenario_cfg = Config.configure_scenario(
        id="test_scenario",
        task_configs=[test_task_cfg],
        additional_data_node_configs=[test_pickle_dn_cfg],
        comparators={test_json_dn_cfg.id: compare_function},
        frequency=Frequency.DAILY,
    )
    test_scenario_cfg.add_sequences({"sequence1": [test_task_cfg]})
    # Migration function registered for version "1.0" on the CSV node.
    Config.add_migration_function("1.0", test_csv_dn_cfg, migrate_csv_path)
    return test_scenario_cfg
def test_read_write_toml_configuration_file():
    """Round-trip the full test configuration through the TOML serializer.

    Backs up the in-memory config, checks the exact TOML text, reloads it,
    backs it up again (round-trip stability), then verifies every section was
    deserialized back to the original Python objects.
    """
    expected_toml_config = f"""
[TAIPY]
[JOB]
mode = "development"
max_nb_of_workers = "1:int"
[CORE]
root_folder = "./taipy/"
storage_folder = ".data/"
repository_type = "filesystem"
read_entity_retry = "0:int"
mode = "development"
version_number = ""
force = "False:bool"
core_version = "{CoreSection._CURRENT_CORE_VERSION}"
[DATA_NODE.default]
storage_type = "pickle"
scope = "SCENARIO:SCOPE"
[DATA_NODE.test_csv_dn]
storage_type = "csv"
scope = "GLOBAL:SCOPE"
validity_period = "1d0h0m0s:timedelta"
path = "./test.csv"
exposed_type = "tests.core.config.test_config_serialization.CustomClass:class"
encoding = "utf-8"
has_header = "True:bool"
[DATA_NODE.test_json_dn]
storage_type = "json"
scope = "SCENARIO:SCOPE"
default_path = "./test.json"
encoder = "tests.core.config.test_config_serialization.CustomEncoder:class"
decoder = "tests.core.config.test_config_serialization.CustomDecoder:class"
encoding = "utf-8"
[DATA_NODE.test_pickle_dn]
storage_type = "pickle"
scope = "SCENARIO:SCOPE"
validity_period = "1d0h0m0s:timedelta"
path = "./test.p"
[TASK.default]
inputs = []
outputs = []
skippable = "False:bool"
[TASK.test_task]
function = "tests.core.config.test_config_serialization.multiply:function"
inputs = [ "test_csv_dn:SECTION",]
outputs = [ "test_json_dn:SECTION",]
skippable = "False:bool"
[SCENARIO.default]
tasks = []
additional_data_nodes = []
[SCENARIO.test_scenario]
tasks = [ "test_task:SECTION",]
additional_data_nodes = [ "test_pickle_dn:SECTION",]
frequency = "DAILY:FREQUENCY"
[VERSION_MIGRATION.migration_fcts."1.0"]
test_csv_dn = "tests.core.config.test_config_serialization.migrate_csv_path:function"
[SCENARIO.default.comparators]
[SCENARIO.default.sequences]
[SCENARIO.test_scenario.comparators]
test_json_dn = [ "tests.core.config.test_config_serialization.compare_function:function",]
[SCENARIO.test_scenario.sequences]
sequence1 = [ "test_task:SECTION",]
""".strip()
    config_test_scenario()
    # Backup the in-memory config and compare the raw TOML text.
    tf = NamedTemporaryFile()
    Config.backup(tf.filename)
    actual_config = tf.read().strip()
    assert actual_config == expected_toml_config
    # Reload the backup and back up again: the round trip must be stable.
    Config.load(tf.filename)
    tf2 = NamedTemporaryFile()
    Config.backup(tf2.filename)
    actual_config_2 = tf2.read().strip()
    assert actual_config_2 == expected_toml_config
    # Unique sections: JOB, CORE and VERSION_MIGRATION.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 3
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
    assert Config.unique_sections[MigrationConfig.name].migration_fcts["1.0"] == {"test_csv_dn": migrate_csv_path}
    # DATA_NODE sections (default + the three configured nodes).
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 4
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].path == "./test.p"
    # TASK sections.
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert not Config.sections[TaskConfig.name]["default"].skippable
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply  # duplicate assert kept from original
    # SCENARIO sections (tasks, additional data nodes, sequences, comparators).
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == [Config.sections[DataNodeConfig.name]["test_pickle_dn"].id]
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
        Config.sections[DataNodeConfig.name]["test_pickle_dn"].id,
    ]
    sequences = {}
    for sequence_name, sequence_tasks in Config.sections[ScenarioConfig.name]["test_scenario"].sequences.items():
        sequences[sequence_name] = [task.id for task in sequence_tasks]
    assert sequences == {"sequence1": [Config.sections[TaskConfig.name]["test_task"].id]}
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
def test_read_write_json_configuration_file():
    """Round-trip the full test configuration through the JSON serializer.

    Same flow as the TOML variant: backup, compare raw text, reload, backup
    again, then verify every deserialized section.
    """
    # NOTE(review): the indentation inside the expected JSON literal was lost
    # in the source formatting; it is reconstructed here to match the
    # serializer's pretty-printed output — verify against Config.backup().
    expected_json_config = (
        """{
    "TAIPY": {},
    "JOB": {
        "mode": "development",
        "max_nb_of_workers": "1:int"
    },
    "CORE": {
        "root_folder": "./taipy/",
        "storage_folder": ".data/",
        "repository_type": "filesystem",
        "read_entity_retry": "0:int",
        "mode": "development",
        "version_number": "",
        "force": "False:bool","""
        + f"""
        "core_version": "{CoreSection._CURRENT_CORE_VERSION}"
"""
        + """    },
    "VERSION_MIGRATION": {
        "migration_fcts": {
            "1.0": {
                "test_csv_dn": "tests.core.config.test_config_serialization.migrate_csv_path:function"
            }
        }
    },
    "DATA_NODE": {
        "default": {
            "storage_type": "pickle",
            "scope": "SCENARIO:SCOPE"
        },
        "test_csv_dn": {
            "storage_type": "csv",
            "scope": "GLOBAL:SCOPE",
            "validity_period": "1d0h0m0s:timedelta",
            "path": "./test.csv",
            "exposed_type": "tests.core.config.test_config_serialization.CustomClass:class",
            "encoding": "utf-8",
            "has_header": "True:bool"
        },
        "test_json_dn": {
            "storage_type": "json",
            "scope": "SCENARIO:SCOPE",
            "default_path": "./test.json",
            "encoder": "tests.core.config.test_config_serialization.CustomEncoder:class",
            "decoder": "tests.core.config.test_config_serialization.CustomDecoder:class",
            "encoding": "utf-8"
        },
        "test_pickle_dn": {
            "storage_type": "pickle",
            "scope": "SCENARIO:SCOPE",
            "validity_period": "1d0h0m0s:timedelta",
            "path": "./test.p"
        }
    },
    "TASK": {
        "default": {
            "function": null,
            "inputs": [],
            "outputs": [],
            "skippable": "False:bool"
        },
        "test_task": {
            "function": "tests.core.config.test_config_serialization.multiply:function",
            "inputs": [
                "test_csv_dn:SECTION"
            ],
            "outputs": [
                "test_json_dn:SECTION"
            ],
            "skippable": "False:bool"
        }
    },
    "SCENARIO": {
        "default": {
            "comparators": {},
            "tasks": [],
            "additional_data_nodes": [],
            "frequency": null,
            "sequences": {}
        },
        "test_scenario": {
            "comparators": {
                "test_json_dn": [
                    "tests.core.config.test_config_serialization.compare_function:function"
                ]
            },
            "tasks": [
                "test_task:SECTION"
            ],
            "additional_data_nodes": [
                "test_pickle_dn:SECTION"
            ],
            "frequency": "DAILY:FREQUENCY",
            "sequences": {
                "sequence1": [
                    "test_task:SECTION"
                ]
            }
        }
    }
}
""".strip()
    )
    # Swap in the JSON serializer before building the config.
    Config._serializer = _JsonSerializer()
    config_test_scenario()
    # Backup and compare the raw JSON text.
    tf = NamedTemporaryFile()
    Config.backup(tf.filename)
    actual_config = tf.read().strip()
    assert actual_config == expected_json_config
    # Reload and back up again: the round trip must be stable.
    Config.load(tf.filename)
    tf2 = NamedTemporaryFile()
    Config.backup(tf2.filename)
    actual_config_2 = tf2.read().strip()
    assert actual_config_2 == expected_json_config
    # Unique sections: JOB, CORE and VERSION_MIGRATION.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 3
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
    assert Config.unique_sections[MigrationConfig.name].migration_fcts["1.0"] == {"test_csv_dn": migrate_csv_path}
    # DATA_NODE sections (default + the three configured nodes).
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 4
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].path == "./test.p"
    # TASK sections.
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    # SCENARIO sections (tasks, additional data nodes, sequences, comparators).
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == [Config.sections[DataNodeConfig.name]["test_pickle_dn"].id]
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
        Config.sections[DataNodeConfig.name]["test_pickle_dn"].id,
    ]
    sequences = {}
    for sequence_name, sequence_tasks in Config.sections[ScenarioConfig.name]["test_scenario"].sequences.items():
        sequences[sequence_name] = [task.id for task in sequence_tasks]
    assert sequences == {"sequence1": [Config.sections[TaskConfig.name]["test_task"].id]}
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
def test_read_write_toml_configuration_file_migrate_sequence_in_scenario():
    """Restore a legacy TOML config where sequences live inline in [SCENARIO].

    Writes an old-format TOML file (inline ``sequences.test_sequence`` key, no
    CORE ``read_entity_retry``/``core_version``, no additional data nodes),
    restores it, and checks the migrated in-memory configuration.
    """
    old_toml_config = """
[TAIPY]
[JOB]
mode = "development"
max_nb_of_workers = "1:int"
[CORE]
root_folder = "./taipy/"
storage_folder = ".data/"
repository_type = "filesystem"
mode = "development"
version_number = ""
force = "False:bool"
[DATA_NODE.default]
storage_type = "pickle"
scope = "SCENARIO:SCOPE"
[DATA_NODE.test_csv_dn]
storage_type = "csv"
scope = "GLOBAL:SCOPE"
validity_period = "1d0h0m0s:timedelta"
path = "./test.csv"
exposed_type = "tests.core.config.test_config_serialization.CustomClass:class"
has_header = "True:bool"
[DATA_NODE.test_json_dn]
storage_type = "json"
scope = "SCENARIO:SCOPE"
default_path = "./test.json"
encoder = "tests.core.config.test_config_serialization.CustomEncoder:class"
decoder = "tests.core.config.test_config_serialization.CustomDecoder:class"
[TASK.default]
inputs = []
outputs = []
skippable = "False:bool"
[TASK.test_task]
function = "tests.core.config.test_config_serialization.multiply:function"
inputs = [ "test_csv_dn:SECTION",]
outputs = [ "test_json_dn:SECTION",]
skippable = "False:bool"
[SCENARIO.default]
[SCENARIO.test_scenario]
tasks = [ "test_task:SECTION",]
sequences.test_sequence = [ "test_task:SECTION",]
frequency = "DAILY:FREQUENCY"
[VERSION_MIGRATION.migration_fcts."1.0"]
test_csv_dn = "tests.core.config.test_config_serialization.migrate_csv_path:function"
[SCENARIO.default.comparators]
[SCENARIO.test_scenario.comparators]
test_json_dn = [ "tests.core.config.test_config_serialization.compare_function:function",]
""".strip()
    config_test_scenario()
    # Write the legacy file and restore it over the current config.
    tf = NamedTemporaryFile()
    with open(tf.filename, "w") as fd:
        fd.writelines(old_toml_config)
    Config.restore(tf.filename)
    # Unique sections: CORE, JOB and VERSION_MIGRATION.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 3
    assert Config.unique_sections[CoreSection.name].root_folder == "./taipy/"
    assert Config.unique_sections[CoreSection.name].storage_folder == ".data/"
    assert Config.unique_sections[CoreSection.name].repository_type == "filesystem"
    assert Config.unique_sections[CoreSection.name].repository_properties == {}
    assert Config.unique_sections[CoreSection.name].mode == "development"
    assert Config.unique_sections[CoreSection.name].version_number == ""
    assert Config.unique_sections[CoreSection.name].force is False
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
    assert Config.unique_sections[MigrationConfig.name].migration_fcts["1.0"] == {"test_csv_dn": migrate_csv_path}
    # DATA_NODE sections: only default, test_csv_dn and test_json_dn (the old
    # file has no test_pickle_dn).
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 3
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    # TASK sections.
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert not Config.sections[TaskConfig.name]["default"].skippable
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply  # duplicate assert kept from original
    # SCENARIO sections: the inline sequence must have been migrated.
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == []
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
    ]
    assert Config.sections[ScenarioConfig.name]["test_scenario"].sequences == {
        "test_sequence": [Config.sections[TaskConfig.name]["test_task"]]
    }
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
def test_read_write_json_configuration_file_migrate_sequence_in_scenario():
    """Restore a legacy JSON config where sequences live inside each scenario.

    Writes an old-format JSON file (no additional data nodes, no pickle node,
    no CORE ``core_version``), restores it, and checks the migrated in-memory
    configuration.
    """
    # NOTE(review): the indentation inside the JSON literal was lost in the
    # source formatting; it is reconstructed here as standard pretty-printed
    # JSON — the parser is indentation-insensitive, so restore() is unaffected.
    old_json_config = """
{
    "TAIPY": {},
    "JOB": {
        "mode": "development",
        "max_nb_of_workers": "1:int"
    },
    "CORE": {
        "root_folder": "./taipy/",
        "storage_folder": ".data/",
        "repository_type": "filesystem",
        "read_entity_retry": "0:int",
        "mode": "development",
        "version_number": "",
        "force": "False:bool"
    },
    "VERSION_MIGRATION": {
        "migration_fcts": {
            "1.0": {
                "test_csv_dn": "tests.core.config.test_config_serialization.migrate_csv_path:function"
            }
        }
    },
    "DATA_NODE": {
        "default": {
            "storage_type": "pickle",
            "scope": "SCENARIO:SCOPE"
        },
        "test_csv_dn": {
            "storage_type": "csv",
            "scope": "GLOBAL:SCOPE",
            "validity_period": "1d0h0m0s:timedelta",
            "path": "./test.csv",
            "exposed_type": "tests.core.config.test_config_serialization.CustomClass:class",
            "has_header": "True:bool"
        },
        "test_json_dn": {
            "storage_type": "json",
            "scope": "SCENARIO:SCOPE",
            "default_path": "./test.json",
            "encoder": "tests.core.config.test_config_serialization.CustomEncoder:class",
            "decoder": "tests.core.config.test_config_serialization.CustomDecoder:class"
        }
    },
    "TASK": {
        "default": {
            "function": null,
            "inputs": [],
            "outputs": [],
            "skippable": "False:bool"
        },
        "test_task": {
            "function": "tests.core.config.test_config_serialization.multiply:function",
            "inputs": [
                "test_csv_dn:SECTION"
            ],
            "outputs": [
                "test_json_dn:SECTION"
            ],
            "skippable": "False:bool"
        }
    },
    "SCENARIO": {
        "default": {
            "comparators": {},
            "sequences": {},
            "frequency": null
        },
        "test_scenario": {
            "comparators": {
                "test_json_dn": [
                    "tests.core.config.test_config_serialization.compare_function:function"
                ]
            },
            "tasks": [
                "test_task:SECTION"
            ],
            "sequences": {
                "test_sequence": [
                    "test_task:SECTION"
                ]
            },
            "frequency": "DAILY:FREQUENCY"
        }
    }
}
""".strip()
    # Swap in the JSON serializer before building and restoring.
    Config._serializer = _JsonSerializer()
    config_test_scenario()
    # Write the legacy file and restore it over the current config.
    tf = NamedTemporaryFile()
    with open(tf.filename, "w") as fd:
        fd.writelines(old_json_config)
    Config.restore(tf.filename)
    # Unique sections: CORE, JOB and VERSION_MIGRATION.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 3
    assert Config.unique_sections[CoreSection.name].root_folder == "./taipy/"
    assert Config.unique_sections[CoreSection.name].storage_folder == ".data/"
    assert Config.unique_sections[CoreSection.name].repository_type == "filesystem"
    assert Config.unique_sections[CoreSection.name].repository_properties == {}
    assert Config.unique_sections[CoreSection.name].mode == "development"
    assert Config.unique_sections[CoreSection.name].version_number == ""
    assert Config.unique_sections[CoreSection.name].force is False
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
    assert Config.unique_sections[MigrationConfig.name].migration_fcts["1.0"] == {"test_csv_dn": migrate_csv_path}
    # DATA_NODE sections: only default, test_csv_dn and test_json_dn.
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 3
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    # TASK sections.
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    # SCENARIO sections: the scenario-embedded sequence must have been migrated.
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == []
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
    ]
    assert Config.sections[ScenarioConfig.name]["test_scenario"].sequences == {
        "test_sequence": [Config.sections[TaskConfig.name]["test_task"]]
    }
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }