# tests/core/config/test_config_serialization.py
  1. # Copyright 2021-2024 Avaiga Private Limited
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
  4. # the License. You may obtain a copy of the License at
  5. #
  6. # http://www.apache.org/licenses/LICENSE-2.0
  7. #
  8. # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
  9. # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
  10. # specific language governing permissions and limitations under the License.
  11. import datetime
  12. import json
  13. from taipy.config import Config
  14. from taipy.config._serializer._json_serializer import _JsonSerializer
  15. from taipy.config.common.frequency import Frequency
  16. from taipy.config.common.scope import Scope
  17. from taipy.core.config import CoreSection, DataNodeConfig, JobConfig, MigrationConfig, ScenarioConfig, TaskConfig
  18. from tests.core.utils.named_temporary_file import NamedTemporaryFile
  19. def multiply(a):
  20. return a * 2
  21. def migrate_csv_path(dn):
  22. dn.path = "foo.csv"
  23. def compare_function(*data_node_results):
  24. comparison_result = {}
  25. current_result_index = 0
  26. for current_result in data_node_results:
  27. comparison_result[current_result_index] = {}
  28. next_result_index = 0
  29. for next_result in data_node_results:
  30. comparison_result[current_result_index][next_result_index] = next_result - current_result
  31. next_result_index += 1
  32. current_result_index += 1
  33. return comparison_result
  34. class CustomClass:
  35. a = None
  36. b = None
  37. class CustomEncoder(json.JSONEncoder):
  38. def default(self, o):
  39. if isinstance(o, datetime):
  40. result = {"__type__": "Datetime", "__value__": o.isoformat()}
  41. else:
  42. result = json.JSONEncoder.default(self, o)
  43. return result
  44. class CustomDecoder(json.JSONDecoder):
  45. def __init__(self, *args, **kwargs):
  46. json.JSONDecoder.__init__(self, *args, **kwargs, object_hook=self.object_hook)
  47. def object_hook(self, source):
  48. if source.get("__type__") == "Datetime":
  49. return datetime.fromisoformat(source.get("__value__"))
  50. else:
  51. return source
  52. def config_test_scenario():
  53. test_csv_dn_cfg = Config.configure_csv_data_node(
  54. id="test_csv_dn",
  55. path="./test.csv",
  56. exposed_type=CustomClass,
  57. scope=Scope.GLOBAL,
  58. validity_period=datetime.timedelta(1),
  59. )
  60. test_json_dn_cfg = Config.configure_json_data_node(
  61. id="test_json_dn",
  62. default_path="./test.json",
  63. encoder=CustomEncoder,
  64. decoder=CustomDecoder,
  65. )
  66. test_pickle_dn_cfg = Config.configure_pickle_data_node(
  67. id="test_pickle_dn",
  68. path="./test.p",
  69. scope=Scope.SCENARIO,
  70. validity_period=datetime.timedelta(1),
  71. )
  72. test_task_cfg = Config.configure_task(
  73. id="test_task", input=test_csv_dn_cfg, function=multiply, output=test_json_dn_cfg
  74. )
  75. test_scenario_cfg = Config.configure_scenario(
  76. id="test_scenario",
  77. task_configs=[test_task_cfg],
  78. additional_data_node_configs=[test_pickle_dn_cfg],
  79. comparators={test_json_dn_cfg.id: compare_function},
  80. frequency=Frequency.DAILY,
  81. )
  82. test_scenario_cfg.add_sequences({"sequence1": [test_task_cfg]})
  83. Config.add_migration_function("1.0", test_csv_dn_cfg, migrate_csv_path)
  84. return test_scenario_cfg
def test_read_write_toml_configuration_file():
    # Expected TOML produced by Config.backup() for the configuration built
    # by config_test_scenario(); the f-string injects the current core
    # version. NOTE(review): whitespace inside this literal was
    # reconstructed from a source whose indentation was lost; assumed flat
    # (no indentation, no blank lines) -- confirm against backup output.
    expected_toml_config = f"""
[TAIPY]
[JOB]
mode = "development"
max_nb_of_workers = "1:int"
[CORE]
root_folder = "./taipy/"
storage_folder = "user_data/"
taipy_storage_folder = ".taipy/"
repository_type = "filesystem"
read_entity_retry = "0:int"
mode = "development"
version_number = ""
force = "False:bool"
core_version = "{CoreSection._CURRENT_CORE_VERSION}"
[DATA_NODE.default]
storage_type = "pickle"
scope = "SCENARIO:SCOPE"
[DATA_NODE.test_csv_dn]
storage_type = "csv"
scope = "GLOBAL:SCOPE"
validity_period = "1d0h0m0s:timedelta"
path = "./test.csv"
exposed_type = "tests.core.config.test_config_serialization.CustomClass:class"
encoding = "utf-8"
has_header = "True:bool"
[DATA_NODE.test_json_dn]
storage_type = "json"
scope = "SCENARIO:SCOPE"
default_path = "./test.json"
encoder = "tests.core.config.test_config_serialization.CustomEncoder:class"
decoder = "tests.core.config.test_config_serialization.CustomDecoder:class"
encoding = "utf-8"
[DATA_NODE.test_pickle_dn]
storage_type = "pickle"
scope = "SCENARIO:SCOPE"
validity_period = "1d0h0m0s:timedelta"
path = "./test.p"
[TASK.default]
inputs = []
outputs = []
skippable = "False:bool"
[TASK.test_task]
function = "tests.core.config.test_config_serialization.multiply:function"
inputs = [ "test_csv_dn:SECTION",]
outputs = [ "test_json_dn:SECTION",]
skippable = "False:bool"
[SCENARIO.default]
tasks = []
additional_data_nodes = []
[SCENARIO.test_scenario]
tasks = [ "test_task:SECTION",]
additional_data_nodes = [ "test_pickle_dn:SECTION",]
frequency = "DAILY:FREQUENCY"
[VERSION_MIGRATION.migration_fcts."1.0"]
test_csv_dn = "tests.core.config.test_config_serialization.migrate_csv_path:function"
[SCENARIO.default.comparators]
[SCENARIO.default.sequences]
[SCENARIO.test_scenario.comparators]
test_json_dn = [ "tests.core.config.test_config_serialization.compare_function:function",]
[SCENARIO.test_scenario.sequences]
sequence1 = [ "test_task:SECTION",]
""".strip()
    # Build the configuration and back it up; the serialized file must match
    # the expected TOML byte-for-byte (modulo surrounding whitespace).
    config_test_scenario()
    tf = NamedTemporaryFile()
    Config.backup(tf.filename)
    actual_config = tf.read().strip()
    assert actual_config == expected_toml_config
    # Round-trip: load the backup, back up again, and check stability.
    Config.load(tf.filename)
    tf2 = NamedTemporaryFile()
    Config.backup(tf2.filename)
    actual_config_2 = tf2.read().strip()
    assert actual_config_2 == expected_toml_config
    # Unique (singleton) sections loaded back from the file.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 3
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
    # The migration function reference must deserialize back to the callable.
    assert Config.unique_sections[MigrationConfig.name].migration_fcts["1.0"] == {"test_csv_dn": migrate_csv_path}
    # Non-unique sections: DATA_NODE, TASK, SCENARIO.
    assert Config.sections is not None
    assert len(Config.sections) == 3
    # Data node configs: default + the three explicit ones.
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 4
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].encoding == "utf-8"
    # `exposed_type`/`encoder`/`decoder` must resolve back to the classes.
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].path == "./test.p"
    # Task configs: default + test_task with its resolved I/O sections.
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert not Config.sections[TaskConfig.name]["default"].skippable
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    # NOTE(review): duplicate of the previous assertion; kept as-is.
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    # Scenario configs: default + test_scenario with tasks, additional data
    # nodes, derived data_nodes, sequences, and comparators restored.
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == [Config.sections[DataNodeConfig.name]["test_pickle_dn"].id]
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
        Config.sections[DataNodeConfig.name]["test_pickle_dn"].id,
    ]
    sequences = {}
    for sequence_name, sequence_tasks in Config.sections[ScenarioConfig.name]["test_scenario"].sequences.items():
        sequences[sequence_name] = [task.id for task in sequence_tasks]
    assert sequences == {"sequence1": [Config.sections[TaskConfig.name]["test_task"].id]}
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
def test_read_write_json_configuration_file():
    # Expected JSON produced by the _JsonSerializer for the configuration
    # built by config_test_scenario(); the f-string spliced in the middle
    # injects the current core version. NOTE(review): whitespace inside
    # these literals was reconstructed from a source whose indentation was
    # lost; assumed flat -- confirm against the serializer output.
    expected_json_config = (
        """{
"TAIPY": {},
"JOB": {
"mode": "development",
"max_nb_of_workers": "1:int"
},
"CORE": {
"root_folder": "./taipy/",
"storage_folder": "user_data/",
"taipy_storage_folder": ".taipy/",
"repository_type": "filesystem",
"read_entity_retry": "0:int",
"mode": "development",
"version_number": "",
"force": "False:bool","""
        + f"""
"core_version": "{CoreSection._CURRENT_CORE_VERSION}"
"""
        + """
},
"VERSION_MIGRATION": {
"migration_fcts": {
"1.0": {
"test_csv_dn": "tests.core.config.test_config_serialization.migrate_csv_path:function"
}
}
},
"DATA_NODE": {
"default": {
"storage_type": "pickle",
"scope": "SCENARIO:SCOPE"
},
"test_csv_dn": {
"storage_type": "csv",
"scope": "GLOBAL:SCOPE",
"validity_period": "1d0h0m0s:timedelta",
"path": "./test.csv",
"exposed_type": "tests.core.config.test_config_serialization.CustomClass:class",
"encoding": "utf-8",
"has_header": "True:bool"
},
"test_json_dn": {
"storage_type": "json",
"scope": "SCENARIO:SCOPE",
"default_path": "./test.json",
"encoder": "tests.core.config.test_config_serialization.CustomEncoder:class",
"decoder": "tests.core.config.test_config_serialization.CustomDecoder:class",
"encoding": "utf-8"
},
"test_pickle_dn": {
"storage_type": "pickle",
"scope": "SCENARIO:SCOPE",
"validity_period": "1d0h0m0s:timedelta",
"path": "./test.p"
}
},
"TASK": {
"default": {
"function": null,
"inputs": [],
"outputs": [],
"skippable": "False:bool"
},
"test_task": {
"function": "tests.core.config.test_config_serialization.multiply:function",
"inputs": [
"test_csv_dn:SECTION"
],
"outputs": [
"test_json_dn:SECTION"
],
"skippable": "False:bool"
}
},
"SCENARIO": {
"default": {
"comparators": {},
"tasks": [],
"additional_data_nodes": [],
"frequency": null,
"sequences": {}
},
"test_scenario": {
"comparators": {
"test_json_dn": [
"tests.core.config.test_config_serialization.compare_function:function"
]
},
"tasks": [
"test_task:SECTION"
],
"additional_data_nodes": [
"test_pickle_dn:SECTION"
],
"frequency": "DAILY:FREQUENCY",
"sequences": {
"sequence1": [
"test_task:SECTION"
]
}
}
}
}
""".strip()
    )
    # Swap the global Config serializer to JSON before building/backing up.
    Config._serializer = _JsonSerializer()
    config_test_scenario()
    tf = NamedTemporaryFile()
    Config.backup(tf.filename)
    actual_config = tf.read().strip()
    assert actual_config == expected_json_config
    # Round-trip: load the backup, back up again, and check stability.
    Config.load(tf.filename)
    tf2 = NamedTemporaryFile()
    Config.backup(tf2.filename)
    actual_config_2 = tf2.read().strip()
    assert actual_config_2 == expected_json_config
    # Unique (singleton) sections loaded back from the file.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 3
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
    assert Config.unique_sections[MigrationConfig.name].migration_fcts["1.0"] == {"test_csv_dn": migrate_csv_path}
    # Non-unique sections: DATA_NODE, TASK, SCENARIO.
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 4
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].encoding == "utf-8"
    # Class/encoder/decoder references must resolve back to the classes.
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoding == "utf-8"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_pickle_dn"].path == "./test.p"
    # Task configs: default + test_task with its resolved I/O sections.
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    # Scenario configs: default + test_scenario with tasks, additional data
    # nodes, derived data_nodes, sequences, and comparators restored.
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == [Config.sections[DataNodeConfig.name]["test_pickle_dn"].id]
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
        Config.sections[DataNodeConfig.name]["test_pickle_dn"].id,
    ]
    sequences = {}
    for sequence_name, sequence_tasks in Config.sections[ScenarioConfig.name]["test_scenario"].sequences.items():
        sequences[sequence_name] = [task.id for task in sequence_tasks]
    assert sequences == {"sequence1": [Config.sections[TaskConfig.name]["test_task"].id]}
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
def test_read_write_toml_configuration_file_migrate_sequence_in_scenario():
    # Legacy TOML layout where sequences are declared inline under
    # [SCENARIO.*] via dotted `sequences.<name>` keys (no dedicated
    # [SCENARIO.*.sequences] table) and without additional_data_nodes.
    # Restoring it must still produce a valid, migrated configuration.
    # NOTE(review): literal whitespace reconstructed from a mangled source;
    # assumed flat -- confirm against the original fixture.
    old_toml_config = """
[TAIPY]
[JOB]
mode = "development"
max_nb_of_workers = "1:int"
[CORE]
root_folder = "./taipy/"
storage_folder = ".data/"
repository_type = "filesystem"
mode = "development"
version_number = ""
force = "False:bool"
[DATA_NODE.default]
storage_type = "pickle"
scope = "SCENARIO:SCOPE"
[DATA_NODE.test_csv_dn]
storage_type = "csv"
scope = "GLOBAL:SCOPE"
validity_period = "1d0h0m0s:timedelta"
path = "./test.csv"
exposed_type = "tests.core.config.test_config_serialization.CustomClass:class"
has_header = "True:bool"
[DATA_NODE.test_json_dn]
storage_type = "json"
scope = "SCENARIO:SCOPE"
default_path = "./test.json"
encoder = "tests.core.config.test_config_serialization.CustomEncoder:class"
decoder = "tests.core.config.test_config_serialization.CustomDecoder:class"
[TASK.default]
inputs = []
outputs = []
skippable = "False:bool"
[TASK.test_task]
function = "tests.core.config.test_config_serialization.multiply:function"
inputs = [ "test_csv_dn:SECTION",]
outputs = [ "test_json_dn:SECTION",]
skippable = "False:bool"
[SCENARIO.default]
[SCENARIO.test_scenario]
tasks = [ "test_task:SECTION",]
sequences.test_sequence = [ "test_task:SECTION",]
frequency = "DAILY:FREQUENCY"
[VERSION_MIGRATION.migration_fcts."1.0"]
test_csv_dn = "tests.core.config.test_config_serialization.migrate_csv_path:function"
[SCENARIO.default.comparators]
[SCENARIO.test_scenario.comparators]
test_json_dn = [ "tests.core.config.test_config_serialization.compare_function:function",]
""".strip()
    config_test_scenario()
    # Write the legacy file and replace the in-memory config with it.
    tf = NamedTemporaryFile()
    with open(tf.filename, "w") as fd:
        fd.writelines(old_toml_config)
    Config.restore(tf.filename)
    # Unique (singleton) sections restored from the legacy file.
    assert Config.unique_sections is not None
    assert len(Config.unique_sections) == 3
    assert Config.unique_sections[CoreSection.name].root_folder == "./taipy/"
    assert Config.unique_sections[CoreSection.name].storage_folder == ".data/"
    assert Config.unique_sections[CoreSection.name].repository_type == "filesystem"
    assert Config.unique_sections[CoreSection.name].repository_properties == {}
    assert Config.unique_sections[CoreSection.name].mode == "development"
    assert Config.unique_sections[CoreSection.name].version_number == ""
    assert Config.unique_sections[CoreSection.name].force is False
    assert Config.unique_sections[JobConfig.name].mode == "development"
    assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
    assert Config.unique_sections[MigrationConfig.name].migration_fcts["1.0"] == {"test_csv_dn": migrate_csv_path}
    # Non-unique sections; only three data node configs in the legacy file
    # (default, csv, json -- no pickle).
    assert Config.sections is not None
    assert len(Config.sections) == 3
    assert Config.sections[DataNodeConfig.name] is not None
    assert len(Config.sections[DataNodeConfig.name]) == 3
    assert Config.sections[DataNodeConfig.name]["default"] is not None
    assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
    assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
    assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
    assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
    # Task configs restored with resolved I/O sections.
    assert Config.sections[TaskConfig.name] is not None
    assert len(Config.sections[TaskConfig.name]) == 2
    assert Config.sections[TaskConfig.name]["default"] is not None
    assert Config.sections[TaskConfig.name]["default"].inputs == []
    assert Config.sections[TaskConfig.name]["default"].outputs == []
    assert Config.sections[TaskConfig.name]["default"].function is None
    assert not Config.sections[TaskConfig.name]["default"].skippable
    assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id
    ]
    assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
        Config.sections[DataNodeConfig.name]["test_json_dn"].id
    ]
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    # NOTE(review): duplicate of the previous assertion; kept as-is.
    assert Config.sections[TaskConfig.name]["test_task"].function == multiply
    # Scenario configs: the legacy inline `sequences.test_sequence` key must
    # be migrated into the scenario's sequences mapping; there are no
    # additional data nodes in the legacy format.
    assert Config.sections[ScenarioConfig.name] is not None
    assert len(Config.sections[ScenarioConfig.name]) == 2
    assert Config.sections[ScenarioConfig.name]["default"] is not None
    assert Config.sections[ScenarioConfig.name]["default"].tasks == []
    assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
    assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
    assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
    assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
        Config.sections[TaskConfig.name]["test_task"].id
    ]
    assert [
        additional_data_node.id
        for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
    ] == []
    assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
        Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
        Config.sections[DataNodeConfig.name]["test_json_dn"].id,
    ]
    assert Config.sections[ScenarioConfig.name]["test_scenario"].sequences == {
        "test_sequence": [Config.sections[TaskConfig.name]["test_task"]]
    }
    assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
        "test_json_dn": [compare_function]
    }
  538. def test_read_write_json_configuration_file_migrate_sequence_in_scenario():
  539. old_json_config = """
  540. {
  541. "TAIPY": {},
  542. "JOB": {
  543. "mode": "development",
  544. "max_nb_of_workers": "1:int"
  545. },
  546. "CORE": {
  547. "root_folder": "./taipy/",
  548. "storage_folder": ".data/",
  549. "repository_type": "filesystem",
  550. "read_entity_retry": "0:int",
  551. "mode": "development",
  552. "version_number": "",
  553. "force": "False:bool"
  554. },
  555. "VERSION_MIGRATION": {
  556. "migration_fcts": {
  557. "1.0": {
  558. "test_csv_dn": "tests.core.config.test_config_serialization.migrate_csv_path:function"
  559. }
  560. }
  561. },
  562. "DATA_NODE": {
  563. "default": {
  564. "storage_type": "pickle",
  565. "scope": "SCENARIO:SCOPE"
  566. },
  567. "test_csv_dn": {
  568. "storage_type": "csv",
  569. "scope": "GLOBAL:SCOPE",
  570. "validity_period": "1d0h0m0s:timedelta",
  571. "path": "./test.csv",
  572. "exposed_type": "tests.core.config.test_config_serialization.CustomClass:class",
  573. "has_header": "True:bool"
  574. },
  575. "test_json_dn": {
  576. "storage_type": "json",
  577. "scope": "SCENARIO:SCOPE",
  578. "default_path": "./test.json",
  579. "encoder": "tests.core.config.test_config_serialization.CustomEncoder:class",
  580. "decoder": "tests.core.config.test_config_serialization.CustomDecoder:class"
  581. }
  582. },
  583. "TASK": {
  584. "default": {
  585. "function": null,
  586. "inputs": [],
  587. "outputs": [],
  588. "skippable": "False:bool"
  589. },
  590. "test_task": {
  591. "function": "tests.core.config.test_config_serialization.multiply:function",
  592. "inputs": [
  593. "test_csv_dn:SECTION"
  594. ],
  595. "outputs": [
  596. "test_json_dn:SECTION"
  597. ],
  598. "skippable": "False:bool"
  599. }
  600. },
  601. "SCENARIO": {
  602. "default": {
  603. "comparators": {},
  604. "sequences": {},
  605. "frequency": null
  606. },
  607. "test_scenario": {
  608. "comparators": {
  609. "test_json_dn": [
  610. "tests.core.config.test_config_serialization.compare_function:function"
  611. ]
  612. },
  613. "tasks": [
  614. "test_task:SECTION"
  615. ],
  616. "sequences": {
  617. "test_sequence": [
  618. "test_task:SECTION"
  619. ]
  620. },
  621. "frequency": "DAILY:FREQUENCY"
  622. }
  623. }
  624. }
  625. """.strip()
  626. Config._serializer = _JsonSerializer()
  627. config_test_scenario()
  628. tf = NamedTemporaryFile()
  629. with open(tf.filename, "w") as fd:
  630. fd.writelines(old_json_config)
  631. Config.restore(tf.filename)
  632. assert Config.unique_sections is not None
  633. assert len(Config.unique_sections) == 3
  634. assert Config.unique_sections[CoreSection.name].root_folder == "./taipy/"
  635. assert Config.unique_sections[CoreSection.name].storage_folder == ".data/"
  636. assert Config.unique_sections[CoreSection.name].repository_type == "filesystem"
  637. assert Config.unique_sections[CoreSection.name].repository_properties == {}
  638. assert Config.unique_sections[CoreSection.name].mode == "development"
  639. assert Config.unique_sections[CoreSection.name].version_number == ""
  640. assert Config.unique_sections[CoreSection.name].force is False
  641. assert Config.unique_sections[JobConfig.name].mode == "development"
  642. assert Config.unique_sections[JobConfig.name].max_nb_of_workers == 1
  643. assert Config.unique_sections[MigrationConfig.name].migration_fcts["1.0"] == {"test_csv_dn": migrate_csv_path}
  644. assert Config.sections is not None
  645. assert len(Config.sections) == 3
  646. assert Config.sections[DataNodeConfig.name] is not None
  647. assert len(Config.sections[DataNodeConfig.name]) == 3
  648. assert Config.sections[DataNodeConfig.name]["default"] is not None
  649. assert Config.sections[DataNodeConfig.name]["default"].storage_type == "pickle"
  650. assert Config.sections[DataNodeConfig.name]["default"].scope == Scope.SCENARIO
  651. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].storage_type == "csv"
  652. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].scope == Scope.GLOBAL
  653. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].validity_period == datetime.timedelta(1)
  654. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].has_header is True
  655. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].path == "./test.csv"
  656. assert Config.sections[DataNodeConfig.name]["test_csv_dn"].exposed_type == CustomClass
  657. assert Config.sections[DataNodeConfig.name]["test_json_dn"].storage_type == "json"
  658. assert Config.sections[DataNodeConfig.name]["test_json_dn"].scope == Scope.SCENARIO
  659. assert Config.sections[DataNodeConfig.name]["test_json_dn"].default_path == "./test.json"
  660. assert Config.sections[DataNodeConfig.name]["test_json_dn"].encoder == CustomEncoder
  661. assert Config.sections[DataNodeConfig.name]["test_json_dn"].decoder == CustomDecoder
  662. assert Config.sections[TaskConfig.name] is not None
  663. assert len(Config.sections[TaskConfig.name]) == 2
  664. assert Config.sections[TaskConfig.name]["default"] is not None
  665. assert Config.sections[TaskConfig.name]["default"].inputs == []
  666. assert Config.sections[TaskConfig.name]["default"].outputs == []
  667. assert Config.sections[TaskConfig.name]["default"].function is None
  668. assert [inp.id for inp in Config.sections[TaskConfig.name]["test_task"].inputs] == [
  669. Config.sections[DataNodeConfig.name]["test_csv_dn"].id
  670. ]
  671. assert [outp.id for outp in Config.sections[TaskConfig.name]["test_task"].outputs] == [
  672. Config.sections[DataNodeConfig.name]["test_json_dn"].id
  673. ]
  674. assert Config.sections[TaskConfig.name]["test_task"].function == multiply
  675. assert Config.sections[ScenarioConfig.name] is not None
  676. assert len(Config.sections[ScenarioConfig.name]) == 2
  677. assert Config.sections[ScenarioConfig.name]["default"] is not None
  678. assert Config.sections[ScenarioConfig.name]["default"].tasks == []
  679. assert Config.sections[ScenarioConfig.name]["default"].additional_data_nodes == []
  680. assert Config.sections[ScenarioConfig.name]["default"].data_nodes == []
  681. assert len(Config.sections[ScenarioConfig.name]["default"].comparators) == 0
  682. assert [task.id for task in Config.sections[ScenarioConfig.name]["test_scenario"].tasks] == [
  683. Config.sections[TaskConfig.name]["test_task"].id
  684. ]
  685. assert [
  686. additional_data_node.id
  687. for additional_data_node in Config.sections[ScenarioConfig.name]["test_scenario"].additional_data_nodes
  688. ] == []
  689. assert sorted([data_node.id for data_node in Config.sections[ScenarioConfig.name]["test_scenario"].data_nodes]) == [
  690. Config.sections[DataNodeConfig.name]["test_csv_dn"].id,
  691. Config.sections[DataNodeConfig.name]["test_json_dn"].id,
  692. ]
  693. assert Config.sections[ScenarioConfig.name]["test_scenario"].sequences == {
  694. "test_sequence": [Config.sections[TaskConfig.name]["test_task"]]
  695. }
  696. assert dict(Config.sections[ScenarioConfig.name]["test_scenario"].comparators) == {
  697. "test_json_dn": [compare_function]
  698. }