test_configure_default_config.py

# Copyright 2023 Avaiga Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import json
from datetime import timedelta

from taipy.config.common.scope import Scope
from taipy.config.config import Config
from taipy.core.common.mongo_default_document import MongoDefaultDocument
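

# NOTE: each test below assumes a clean Config state. In the Taipy code base this is
# usually provided by an autouse fixture (e.g. in conftest.py) that resets the Config
# between tests; that fixture is assumed here rather than shown.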
def test_set_default_data_node_configuration():
    data_node1 = Config.configure_data_node(id="input_data1")
    assert data_node1.storage_type == "pickle"
    assert data_node1.scope == Scope.SCENARIO
    assert data_node1.validity_period is None

    Config.set_default_data_node_configuration("in_memory", scope=Scope.GLOBAL)
    data_node2 = Config.configure_data_node(id="input_data2")
    assert data_node2.storage_type == "in_memory"
    assert data_node2.scope == Scope.GLOBAL
    assert data_node2.validity_period is None

    Config.set_default_data_node_configuration("csv")
    data_node3 = Config.configure_data_node(id="input_data3")
    assert data_node3.storage_type == "csv"
    assert data_node3.scope == Scope.SCENARIO
    assert data_node3.validity_period is None

    Config.set_default_data_node_configuration("json", validity_period=timedelta(1))
    data_node4 = Config.configure_data_node(id="input_data4")
    assert data_node4.storage_type == "json"
    assert data_node4.scope == Scope.SCENARIO
    assert data_node4.validity_period == timedelta(1)
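

# Calling set_default_data_node_configuration again replaces the previous default configuration
# entirely: properties attached to an earlier default are not carried over to the new one.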
  36. def test_set_default_data_node_configuration_replace_old_default_config():
  37. Config.set_default_data_node_configuration(
  38. "in_memory",
  39. prop1="1",
  40. prop2="2",
  41. prop3="3",
  42. )
  43. dn1 = Config.configure_data_node(id="dn1")
  44. assert len(dn1.properties) == 3
  45. Config.set_default_data_node_configuration(
  46. "csv",
  47. prop4="4",
  48. prop5="5",
  49. prop6="6",
  50. )
  51. dn2 = Config.configure_data_node(id="dn2")
  52. assert dn2.storage_type == "csv"
  53. assert len(dn2.properties) == 6 # encoding, exposed_type, and has_header too
  54. assert dn2.prop4 == "4"
  55. assert dn2.prop5 == "5"
  56. assert dn2.prop6 == "6"
  57. assert dn2.prop1 is None
  58. assert dn2.prop2 is None
  59. assert dn2.prop3 is None
  60. def test_config_storage_type_different_from_default_data_node():
  61. Config.set_default_data_node_configuration(
  62. storage_type="pickle",
  63. custom_property={"foo": "bar"},
  64. scope=Scope.GLOBAL,
  65. )
  66. # Config a datanode with specific "storage_type" different than "pickle"
  67. # should ignore the default datanode
  68. csv_dn = Config.configure_data_node(id="csv_dn", storage_type="csv")
  69. assert len(csv_dn.properties) == 3 # encoding, exposed_type, and has_header
  70. assert csv_dn.properties.get("custom_property") is None
  71. assert csv_dn.scope == Scope.SCENARIO
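

# Each test below sets a default configuration for one storage type and checks that the default
# is inherited by data nodes configured (1) without a storage_type, (2) without a storage_type
# but overriding some properties, and (3) with the same storage_type set explicitly.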
def test_set_default_csv_data_node_configuration():
    Config.set_default_data_node_configuration(
        storage_type="csv",
        default_path="default.csv",
        has_header=False,
        exposed_type="numpy",
        scope=Scope.GLOBAL,
        validity_period=timedelta(2),
    )

    # Configuring a data node without a storage_type should inherit the default configuration
    dn1 = Config.configure_data_node(id="dn1")
    assert dn1.storage_type == "csv"
    assert dn1.scope == Scope.GLOBAL
    assert dn1.default_path == "default.csv"
    assert dn1.has_header is False
    assert dn1.exposed_type == "numpy"
    assert dn1.validity_period == timedelta(2)

    # Configuring a data node without a storage_type, overriding some default properties
    dn2 = Config.configure_data_node(id="dn2", default_path="dn2.csv")
    assert dn2.storage_type == "csv"
    assert dn2.default_path == "dn2.csv"
    assert dn2.has_header is False
    assert dn2.exposed_type == "numpy"
    assert dn2.scope == Scope.GLOBAL
    assert dn2.validity_period == timedelta(2)

    # Configuring a data node with the same storage_type as the default ("csv")
    # should inherit the remaining default properties
    dn3 = Config.configure_data_node(
        id="dn3",
        storage_type="csv",
        default_path="dn3.csv",
        scope=Scope.SCENARIO,
        validity_period=timedelta(1),
    )
    assert dn3.storage_type == "csv"
    assert dn3.default_path == "dn3.csv"
    assert dn3.has_header is False
    assert dn3.exposed_type == "numpy"
    assert dn3.scope == Scope.SCENARIO
    assert dn3.validity_period == timedelta(1)


def test_set_default_json_data_node_configuration():
    class MyCustomEncoder(json.JSONEncoder):
        ...

    class MyCustomDecoder(json.JSONDecoder):
        ...

    Config.set_default_data_node_configuration(
        storage_type="json",
        default_path="default.json",
        encoder=MyCustomEncoder,
        scope=Scope.GLOBAL,
        validity_period=timedelta(2),
    )

    # Configuring a data node without a storage_type should inherit the default configuration
    dn1 = Config.configure_data_node(id="dn1")
    assert dn1.storage_type == "json"
    assert dn1.default_path == "default.json"
    assert dn1.encoder == MyCustomEncoder
    assert dn1.decoder is None
    assert dn1.scope == Scope.GLOBAL
    assert dn1.validity_period == timedelta(2)

    # Configuring a data node without a storage_type, overriding some default properties
    dn2 = Config.configure_data_node(id="dn2", default_path="dn2.json")
    assert dn2.storage_type == "json"
    assert dn2.default_path == "dn2.json"
    assert dn2.encoder == MyCustomEncoder
    assert dn2.decoder is None
    assert dn2.scope == Scope.GLOBAL
    assert dn2.validity_period == timedelta(2)

    # Configuring a data node with the same storage_type as the default ("json")
    # should inherit the remaining default properties
    dn3 = Config.configure_data_node(
        id="dn3",
        storage_type="json",
        default_path="dn3.json",
        decoder=MyCustomDecoder,
        validity_period=timedelta(1),
    )
    assert dn3.storage_type == "json"
    assert dn3.default_path == "dn3.json"
    assert dn3.encoder == MyCustomEncoder
    assert dn3.decoder == MyCustomDecoder
    assert dn3.scope == Scope.GLOBAL
    assert dn3.validity_period == timedelta(1)


def test_set_default_parquet_data_node_configuration():
    Config.set_default_data_node_configuration(
        storage_type="parquet",
        default_path="default.parquet",
        compression="gzip",
        exposed_type="numpy",
        scope=Scope.GLOBAL,
        validity_period=timedelta(2),
    )

    # Configuring a data node without a storage_type should inherit the default configuration
    dn1 = Config.configure_data_node(id="dn1")
    assert dn1.storage_type == "parquet"
    assert dn1.default_path == "default.parquet"
    assert dn1.engine == "pyarrow"
    assert dn1.compression == "gzip"
    assert dn1.read_kwargs is None
    assert dn1.write_kwargs is None
    assert dn1.exposed_type == "numpy"
    assert dn1.scope == Scope.GLOBAL
    assert dn1.validity_period == timedelta(2)

    # Configuring a data node without a storage_type, overriding some default properties
    dn2 = Config.configure_data_node(
        id="dn2",
        default_path="dn2.parquet",
        engine="fastparquet",
    )
    assert dn2.storage_type == "parquet"
    assert dn2.default_path == "dn2.parquet"
    assert dn2.engine == "fastparquet"
    assert dn2.compression == "gzip"
    assert dn2.read_kwargs is None
    assert dn2.write_kwargs is None
    assert dn2.exposed_type == "numpy"
    assert dn2.scope == Scope.GLOBAL
    assert dn2.validity_period == timedelta(2)

    # Configuring a data node with the same storage_type as the default ("parquet")
    # should inherit the remaining default properties
    dn3 = Config.configure_data_node(
        id="dn3",
        storage_type="parquet",
        default_path="dn3.parquet",
        read_kwargs={"filter": "foo"},
        scope=Scope.SCENARIO,
        validity_period=timedelta(1),
    )
    assert dn3.storage_type == "parquet"
    assert dn3.default_path == "dn3.parquet"
    assert dn3.engine == "pyarrow"
    assert dn3.compression == "gzip"
    assert dn3.read_kwargs == {"filter": "foo"}
    assert dn3.write_kwargs is None
    assert dn3.exposed_type == "numpy"
    assert dn3.scope == Scope.SCENARIO
    assert dn3.validity_period == timedelta(1)


def test_set_default_excel_data_node_configuration():
    Config.set_default_data_node_configuration(
        storage_type="excel",
        default_path="default.xlsx",
        has_header=False,
        exposed_type="numpy",
        scope=Scope.GLOBAL,
        validity_period=timedelta(2),
    )

    # Configuring a data node without a storage_type should inherit the default configuration
    dn1 = Config.configure_data_node(id="dn1")
    assert dn1.storage_type == "excel"
    assert dn1.scope == Scope.GLOBAL
    assert dn1.default_path == "default.xlsx"
    assert dn1.has_header is False
    assert dn1.sheet_name is None
    assert dn1.exposed_type == "numpy"
    assert dn1.validity_period == timedelta(2)

    # Configuring a data node without a storage_type, overriding some default properties
    dn2 = Config.configure_data_node(id="dn2", default_path="dn2.xlsx", sheet_name="sheet_1")
    assert dn2.storage_type == "excel"
    assert dn2.default_path == "dn2.xlsx"
    assert dn2.has_header is False
    assert dn2.sheet_name == "sheet_1"
    assert dn2.exposed_type == "numpy"
    assert dn2.scope == Scope.GLOBAL
    assert dn2.validity_period == timedelta(2)

    # Configuring a data node with the same storage_type as the default ("excel")
    # should inherit the remaining default properties
    dn3 = Config.configure_data_node(
        id="dn3",
        storage_type="excel",
        default_path="dn3.xlsx",
        scope=Scope.SCENARIO,
        validity_period=timedelta(1),
    )
    assert dn3.storage_type == "excel"
    assert dn3.default_path == "dn3.xlsx"
    assert dn3.has_header is False
    assert dn3.sheet_name is None
    assert dn3.exposed_type == "numpy"
    assert dn3.scope == Scope.SCENARIO
    assert dn3.validity_period == timedelta(1)


def test_set_default_pickle_data_node_configuration():
    Config.set_default_data_node_configuration(
        storage_type="pickle",
        default_data=1,
        exposed_type="numpy",
        scope=Scope.GLOBAL,
        validity_period=timedelta(2),
    )

    # Configuring a data node without a storage_type should inherit the default configuration
    dn1 = Config.configure_data_node(id="dn1")
    assert dn1.storage_type == "pickle"
    assert dn1.scope == Scope.GLOBAL
    assert dn1.default_path is None
    assert dn1.default_data == 1
    assert dn1.exposed_type == "numpy"
    assert dn1.validity_period == timedelta(2)

    # Configuring a data node without a storage_type, overriding some default properties
    dn2 = Config.configure_data_node(id="dn2", default_path="dn2.pkl", default_data=2)
    assert dn2.storage_type == "pickle"
    assert dn2.default_path == "dn2.pkl"
    assert dn2.default_data == 2
    assert dn2.exposed_type == "numpy"
    assert dn2.scope == Scope.GLOBAL
    assert dn2.validity_period == timedelta(2)

    # Configuring a data node with the same storage_type as the default ("pickle")
    # should inherit the remaining default properties
    dn3 = Config.configure_data_node(
        id="dn3",
        storage_type="pickle",
        default_path="dn3.pkl",
        scope=Scope.SCENARIO,
        validity_period=timedelta(1),
    )
    assert dn3.storage_type == "pickle"
    assert dn3.default_path == "dn3.pkl"
    assert dn3.default_data == 1
    assert dn3.exposed_type == "numpy"
    assert dn3.scope == Scope.SCENARIO
    assert dn3.validity_period == timedelta(1)


def test_set_default_sql_table_data_node_configuration():
    Config.set_default_data_node_configuration(
        storage_type="sql_table",
        db_username="default_user",
        db_password="default_pwd",
        db_name="default_db_name",
        db_engine="mssql",
        table_name="default_table",
        db_port=1010,
        db_host="default_host",
        db_driver="default server",
        db_extra_args={"default": "default"},
        scope=Scope.GLOBAL,
        validity_period=timedelta(2),
    )

    # Configuring a data node without a storage_type should inherit the default configuration
    dn1 = Config.configure_data_node(id="dn1")
    assert dn1.storage_type == "sql_table"
    assert dn1.db_username == "default_user"
    assert dn1.db_password == "default_pwd"
    assert dn1.db_name == "default_db_name"
    assert dn1.db_engine == "mssql"
    assert dn1.table_name == "default_table"
    assert dn1.db_port == 1010
    assert dn1.db_host == "default_host"
    assert dn1.db_driver == "default server"
    assert dn1.db_extra_args == {"default": "default"}
    assert dn1.scope == Scope.GLOBAL
    assert dn1.validity_period == timedelta(2)

    # Configuring a data node without a storage_type, overriding some default properties
    dn2 = Config.configure_data_node(
        id="dn2",
        table_name="table_2",
        db_port=2020,
        db_host="host_2",
    )
    assert dn2.storage_type == "sql_table"
    assert dn2.db_username == "default_user"
    assert dn2.db_password == "default_pwd"
    assert dn2.db_name == "default_db_name"
    assert dn2.db_engine == "mssql"
    assert dn2.table_name == "table_2"
    assert dn2.db_port == 2020
    assert dn2.db_host == "host_2"
    assert dn2.db_driver == "default server"
    assert dn2.db_extra_args == {"default": "default"}
    assert dn2.scope == Scope.GLOBAL
    assert dn2.validity_period == timedelta(2)

    # Configuring a data node with the same storage_type as the default ("sql_table")
    # should inherit the remaining default properties
    dn3 = Config.configure_data_node(
        id="dn3",
        storage_type="sql_table",
        db_username="user_3",
        db_password="pwd_3",
        db_name="db_3",
        db_engine="postgresql",
        table_name="table_3",
        validity_period=timedelta(1),
    )
    assert dn3.storage_type == "sql_table"
    assert dn3.db_username == "user_3"
    assert dn3.db_password == "pwd_3"
    assert dn3.db_name == "db_3"
    assert dn3.db_engine == "postgresql"
    assert dn3.table_name == "table_3"
    assert dn3.db_port == 1010
    assert dn3.db_host == "default_host"
    assert dn3.db_driver == "default server"
    assert dn3.db_extra_args == {"default": "default"}
    assert dn3.scope == Scope.GLOBAL
    assert dn3.validity_period == timedelta(1)


def test_set_default_sql_data_node_configuration():
    def query_builder():
        ...

    Config.set_default_data_node_configuration(
        storage_type="sql",
        db_username="default_user",
        db_password="default_pwd",
        db_name="default_db_name",
        db_engine="mssql",
        read_query="SELECT * FROM default_table",
        write_query_builder=query_builder,
        append_query_builder=query_builder,
        db_port=1010,
        db_host="default_host",
        db_driver="default server",
        db_extra_args={"default": "default"},
        scope=Scope.GLOBAL,
        validity_period=timedelta(2),
    )

    # Configuring a data node without a storage_type should inherit the default configuration
    dn1 = Config.configure_data_node(id="dn1")
    assert dn1.storage_type == "sql"
    assert dn1.db_username == "default_user"
    assert dn1.db_password == "default_pwd"
    assert dn1.db_name == "default_db_name"
    assert dn1.db_engine == "mssql"
    assert dn1.read_query == "SELECT * FROM default_table"
    assert dn1.write_query_builder == query_builder
    assert dn1.append_query_builder == query_builder
    assert dn1.db_port == 1010
    assert dn1.db_host == "default_host"
    assert dn1.db_driver == "default server"
    assert dn1.db_extra_args == {"default": "default"}
    assert dn1.scope == Scope.GLOBAL
    assert dn1.validity_period == timedelta(2)

    # Configuring a data node without a storage_type, overriding some default properties
    dn2 = Config.configure_data_node(
        id="dn2", table_name="table_2", db_port=2020, db_host="host_2", read_query="SELECT * FROM table_2"
    )
    assert dn2.storage_type == "sql"
    assert dn2.db_username == "default_user"
    assert dn2.db_password == "default_pwd"
    assert dn2.db_name == "default_db_name"
    assert dn2.db_engine == "mssql"
    assert dn2.read_query == "SELECT * FROM table_2"
    assert dn2.write_query_builder == query_builder
    assert dn2.append_query_builder == query_builder
    assert dn2.db_port == 2020
    assert dn2.db_host == "host_2"
    assert dn2.db_driver == "default server"
    assert dn2.db_extra_args == {"default": "default"}
    assert dn2.scope == Scope.GLOBAL
    assert dn2.validity_period == timedelta(2)

    # Configuring a data node with the same storage_type as the default ("sql")
    # should inherit the remaining default properties
    dn3 = Config.configure_data_node(
        id="dn3",
        storage_type="sql",
        db_username="user_3",
        db_password="pwd_3",
        db_name="db_3",
        db_engine="postgresql",
        read_query="SELECT * FROM table_3",
        write_query_builder=query_builder,
        validity_period=timedelta(1),
    )
    assert dn3.storage_type == "sql"
    assert dn3.db_username == "user_3"
    assert dn3.db_password == "pwd_3"
    assert dn3.db_name == "db_3"
    assert dn3.db_engine == "postgresql"
    assert dn3.read_query == "SELECT * FROM table_3"
    assert dn3.write_query_builder == query_builder
    assert dn3.append_query_builder == query_builder
    assert dn3.db_port == 1010
    assert dn3.db_host == "default_host"
    assert dn3.db_driver == "default server"
    assert dn3.db_extra_args == {"default": "default"}
    assert dn3.scope == Scope.GLOBAL
    assert dn3.validity_period == timedelta(1)
  457. def test_set_default_mongo_collection_data_node_configuration():
  458. Config.set_default_data_node_configuration(
  459. storage_type="mongo_collection",
  460. db_name="default_db_name",
  461. collection_name="default_collection",
  462. db_port=1010,
  463. db_host="default_host",
  464. db_driver="default server",
  465. db_extra_args={"default": "default"},
  466. scope=Scope.GLOBAL,
  467. validity_period=timedelta(2),
  468. )
  469. # Config with generic config_data_node without storage_type
  470. # should return the default DataNode
  471. dn1 = Config.configure_data_node(id="dn1")
  472. assert dn1.storage_type == "mongo_collection"
  473. assert dn1.db_username == ""
  474. assert dn1.db_password == ""
  475. assert dn1.db_name == "default_db_name"
  476. assert dn1.collection_name == "default_collection"
  477. assert dn1.custom_document == MongoDefaultDocument
  478. assert dn1.db_host == "default_host"
  479. assert dn1.db_port == 1010
  480. assert dn1.db_driver == "default server"
  481. assert dn1.db_extra_args == {"default": "default"}
  482. assert dn1.scope == Scope.GLOBAL
  483. assert dn1.validity_period == timedelta(2)
  484. # Config with generic config_data_node without storage_type
  485. # with custom properties
  486. dn2 = Config.configure_data_node(
  487. id="dn2",
  488. collection_name="collection_2",
  489. db_port=2020,
  490. db_host="host_2",
  491. )
  492. assert dn2.storage_type == "mongo_collection"
  493. assert dn2.db_username == ""
  494. assert dn2.db_password == ""
  495. assert dn2.db_name == "default_db_name"
  496. assert dn2.collection_name == "collection_2"
  497. assert dn2.custom_document == MongoDefaultDocument
  498. assert dn2.db_host == "host_2"
  499. assert dn2.db_port == 2020
  500. assert dn2.db_driver == "default server"
  501. assert dn2.db_extra_args == {"default": "default"}
  502. assert dn2.scope == Scope.GLOBAL
  503. assert dn2.validity_period == timedelta(2)
  504. # Config a datanode with specific "storage_type" = "mongo_collection"
  505. # should use properties from the default datanode
  506. dn3 = Config.configure_data_node(
  507. id="dn3",
  508. storage_type="mongo_collection",
  509. db_name="db_3",
  510. collection_name="collection_3",
  511. db_username="user_3",
  512. db_password="pwd_3",
  513. validity_period=timedelta(1),
  514. )
  515. assert dn3.storage_type == "mongo_collection"
  516. assert dn3.db_username == "user_3"
  517. assert dn3.db_password == "pwd_3"
  518. assert dn3.db_name == "db_3"
  519. assert dn3.collection_name == "collection_3"
  520. assert dn3.custom_document == MongoDefaultDocument
  521. assert dn3.db_port == 1010
  522. assert dn3.db_host == "default_host"
  523. assert dn3.db_driver == "default server"
  524. assert dn3.db_extra_args == {"default": "default"}
  525. assert dn3.scope == Scope.GLOBAL
  526. assert dn3.validity_period == timedelta(1)


def test_set_default_s3_object_data_node_configuration():
    Config.set_default_data_node_configuration(
        storage_type="s3_object",
        aws_access_key="default_access_key",
        aws_secret_access_key="default_secret_access_key",
        aws_s3_bucket_name="default_bucket_name",
        aws_s3_object_key="default_object_key",
        aws_region="",
        aws_s3_object_parameters={"default": "default"},
        scope=Scope.GLOBAL,
        validity_period=timedelta(2),
    )

    # Configuring a data node without a storage_type should inherit the default configuration
    dn1 = Config.configure_data_node(id="dn1")
    assert dn1.storage_type == "s3_object"
    assert dn1.aws_access_key == "default_access_key"
    assert dn1.aws_secret_access_key == "default_secret_access_key"
    assert dn1.aws_s3_bucket_name == "default_bucket_name"
    assert dn1.aws_s3_object_key == "default_object_key"
    assert dn1.aws_region == ""
    assert dn1.aws_s3_object_parameters == {"default": "default"}
    assert dn1.scope == Scope.GLOBAL
    assert dn1.validity_period == timedelta(2)

    # Configuring a data node without a storage_type, overriding some default properties
    dn2 = Config.configure_data_node(
        id="dn2",
        aws_access_key="custom_access_key_2",
        aws_secret_access_key="custom_secret_access_key_2",
        aws_s3_bucket_name="custom_bucket_name_2",
        aws_s3_object_key="custom_object_key_2",
    )
    assert dn2.storage_type == "s3_object"
    assert dn2.aws_access_key == "custom_access_key_2"
    assert dn2.aws_secret_access_key == "custom_secret_access_key_2"
    assert dn2.aws_s3_bucket_name == "custom_bucket_name_2"
    assert dn2.aws_s3_object_key == "custom_object_key_2"
    assert dn2.aws_region == ""
    assert dn2.aws_s3_object_parameters == {"default": "default"}
    assert dn2.scope == Scope.GLOBAL
    assert dn2.validity_period == timedelta(2)

    # Configuring a data node with the same storage_type as the default ("s3_object")
    # should inherit the remaining default properties
    dn3 = Config.configure_data_node(
        id="dn3",
        storage_type="s3_object",
        aws_access_key="custom_access_key_3",
        aws_secret_access_key="custom_secret_access_key_3",
        aws_s3_bucket_name="custom_bucket_name_3",
        aws_s3_object_key="custom_object_key_3",
        aws_region="",
        aws_s3_object_parameters={"default": "default"},
        scope=Scope.GLOBAL,
        validity_period=timedelta(1),
    )
    assert dn3.storage_type == "s3_object"
    assert dn3.aws_access_key == "custom_access_key_3"
    assert dn3.aws_secret_access_key == "custom_secret_access_key_3"
    assert dn3.aws_s3_bucket_name == "custom_bucket_name_3"
    assert dn3.aws_s3_object_key == "custom_object_key_3"
    assert dn3.aws_region == ""
    assert dn3.aws_s3_object_parameters == {"default": "default"}
    assert dn3.scope == Scope.GLOBAL
    assert dn3.validity_period == timedelta(1)