# test_configure_default_config.py
  1. # Copyright 2021-2024 Avaiga Private Limited
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
  4. # the License. You may obtain a copy of the License at
  5. #
  6. # http://www.apache.org/licenses/LICENSE-2.0
  7. #
  8. # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
  9. # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
  10. # specific language governing permissions and limitations under the License.
  11. import json
  12. from datetime import timedelta
  13. from taipy.common.config import Config
  14. from taipy.common.config.common.scope import Scope
  15. from taipy.core.common.mongo_default_document import MongoDefaultDocument
  16. def test_set_default_data_node_configuration():
  17. data_node1 = Config.configure_data_node(id="input_data1")
  18. assert data_node1.storage_type == "pickle"
  19. assert data_node1.scope == Scope.SCENARIO
  20. assert data_node1.validity_period is None
  21. Config.set_default_data_node_configuration("in_memory", scope=Scope.GLOBAL)
  22. data_node2 = Config.configure_data_node(id="input_data2")
  23. assert data_node2.storage_type == "in_memory"
  24. assert data_node2.scope == Scope.GLOBAL
  25. assert data_node2.validity_period is None
  26. Config.set_default_data_node_configuration("csv")
  27. data_node3 = Config.configure_data_node(id="input_data3")
  28. assert data_node3.storage_type == "csv"
  29. assert data_node3.scope == Scope.SCENARIO
  30. assert data_node3.validity_period is None
  31. Config.set_default_data_node_configuration("json", validity_period=timedelta(1))
  32. data_node4 = Config.configure_data_node(id="input_data4")
  33. assert data_node4.storage_type == "json"
  34. assert data_node4.scope == Scope.SCENARIO
  35. assert data_node4.validity_period == timedelta(1)
  36. Config.set_default_data_node_configuration("s3_object", validity_period=timedelta(1))
  37. data_node5 = Config.configure_data_node(id="input_data5")
  38. assert data_node5.storage_type == "s3_object"
  39. assert data_node5.scope == Scope.SCENARIO
  40. assert data_node5.validity_period == timedelta(1)
  41. def test_set_default_data_node_configuration_replace_old_default_config():
  42. Config.set_default_data_node_configuration(
  43. "in_memory",
  44. prop1="1",
  45. prop2="2",
  46. prop3="3",
  47. )
  48. dn1 = Config.configure_data_node(id="dn1")
  49. assert len(dn1.properties) == 3
  50. Config.set_default_data_node_configuration(
  51. "csv",
  52. prop4="4",
  53. prop5="5",
  54. prop6="6",
  55. )
  56. dn2 = Config.configure_data_node(id="dn2")
  57. assert dn2.storage_type == "csv"
  58. assert len(dn2.properties) == 6 # encoding, exposed_type, and has_header too
  59. assert dn2.prop4 == "4"
  60. assert dn2.prop5 == "5"
  61. assert dn2.prop6 == "6"
  62. assert dn2.prop1 is None
  63. assert dn2.prop2 is None
  64. assert dn2.prop3 is None
  65. def test_config_storage_type_different_from_default_data_node():
  66. Config.set_default_data_node_configuration(
  67. storage_type="pickle",
  68. custom_property={"foo": "bar"},
  69. scope=Scope.GLOBAL,
  70. )
  71. # Config a datanode with specific "storage_type" different than "pickle"
  72. # should ignore the default datanode
  73. csv_dn = Config.configure_data_node(id="csv_dn", storage_type="csv")
  74. assert len(csv_dn.properties) == 3 # encoding, exposed_type, and has_header
  75. assert csv_dn.properties.get("custom_property") is None
  76. assert csv_dn.scope == Scope.SCENARIO
  77. def test_set_default_csv_data_node_configuration():
  78. Config.set_default_data_node_configuration(
  79. storage_type="csv",
  80. default_path="default.csv",
  81. has_header=False,
  82. exposed_type="numpy",
  83. scope=Scope.GLOBAL,
  84. validity_period=timedelta(2),
  85. )
  86. # Config with generic config_data_node without storage_type
  87. # should return the default DataNode
  88. dn1 = Config.configure_data_node(id="dn1")
  89. assert dn1.storage_type == "csv"
  90. assert dn1.scope == Scope.GLOBAL
  91. assert dn1.default_path == "default.csv"
  92. assert dn1.has_header is False
  93. assert dn1.exposed_type == "numpy"
  94. assert dn1.validity_period == timedelta(2)
  95. # Config with generic config_data_node without storage_type
  96. # with custom properties
  97. dn2 = Config.configure_data_node(id="dn2", default_path="dn2.csv")
  98. assert dn2.storage_type == "csv"
  99. assert dn2.default_path == "dn2.csv"
  100. assert dn2.has_header is False
  101. assert dn2.exposed_type == "numpy"
  102. assert dn2.scope == Scope.GLOBAL
  103. assert dn2.validity_period == timedelta(2)
  104. # Config a datanode with specific "storage_type" = "csv"
  105. # should use properties from the default datanode
  106. dn3 = Config.configure_data_node(
  107. id="dn3",
  108. storage_type="csv",
  109. default_path="dn3.csv",
  110. scope=Scope.SCENARIO,
  111. validity_period=timedelta(1),
  112. )
  113. assert dn3.storage_type == "csv"
  114. assert dn3.default_path == "dn3.csv"
  115. assert dn3.has_header is False
  116. assert dn3.exposed_type == "numpy"
  117. assert dn3.scope == Scope.SCENARIO
  118. assert dn3.validity_period == timedelta(1)
  119. def test_set_default_json_data_node_configuration():
  120. class MyCustomEncoder(json.JSONEncoder): ...
  121. class MyCustomDecoder(json.JSONDecoder): ...
  122. Config.set_default_data_node_configuration(
  123. storage_type="json",
  124. default_path="default.json",
  125. encoder=MyCustomEncoder,
  126. scope=Scope.GLOBAL,
  127. validity_period=timedelta(2),
  128. )
  129. # Config with generic config_data_node without storage_type
  130. # should return the default DataNode
  131. dn1 = Config.configure_data_node(id="dn1")
  132. assert dn1.storage_type == "json"
  133. assert dn1.default_path == "default.json"
  134. assert dn1.encoder == MyCustomEncoder
  135. assert dn1.decoder is None
  136. assert dn1.scope == Scope.GLOBAL
  137. assert dn1.validity_period == timedelta(2)
  138. # Config with generic config_data_node without storage_type
  139. # with custom properties
  140. dn2 = Config.configure_data_node(id="dn2", default_path="dn2.json")
  141. assert dn2.storage_type == "json"
  142. assert dn2.default_path == "dn2.json"
  143. assert dn2.encoder == MyCustomEncoder
  144. assert dn2.decoder is None
  145. assert dn2.scope == Scope.GLOBAL
  146. assert dn2.validity_period == timedelta(2)
  147. # Config a datanode with specific "storage_type" = "json"
  148. # should use properties from the default datanode
  149. dn3 = Config.configure_data_node(
  150. id="dn3",
  151. storage_type="json",
  152. default_path="dn3.json",
  153. decoder=MyCustomDecoder,
  154. validity_period=timedelta(1),
  155. )
  156. assert dn3.storage_type == "json"
  157. assert dn3.default_path == "dn3.json"
  158. assert dn3.encoder == MyCustomEncoder
  159. assert dn3.decoder == MyCustomDecoder
  160. assert dn3.scope == Scope.GLOBAL
  161. assert dn3.validity_period == timedelta(1)
  162. def test_set_default_parquet_data_node_configuration():
  163. Config.set_default_data_node_configuration(
  164. storage_type="parquet",
  165. default_path="default.parquet",
  166. compression="gzip",
  167. exposed_type="numpy",
  168. scope=Scope.GLOBAL,
  169. validity_period=timedelta(2),
  170. )
  171. # Config with generic config_data_node without storage_type
  172. # should return the default DataNode
  173. dn1 = Config.configure_data_node(id="dn1")
  174. assert dn1.storage_type == "parquet"
  175. assert dn1.default_path == "default.parquet"
  176. assert dn1.engine == "pyarrow"
  177. assert dn1.compression == "gzip"
  178. assert dn1.read_kwargs is None
  179. assert dn1.write_kwargs is None
  180. assert dn1.exposed_type == "numpy"
  181. assert dn1.scope == Scope.GLOBAL
  182. assert dn1.validity_period == timedelta(2)
  183. # Config with generic config_data_node without storage_type
  184. # with custom properties
  185. dn2 = Config.configure_data_node(
  186. id="dn2",
  187. default_path="dn2.parquet",
  188. engine="fastparquet",
  189. )
  190. assert dn2.storage_type == "parquet"
  191. assert dn2.default_path == "dn2.parquet"
  192. assert dn2.engine == "fastparquet"
  193. assert dn2.compression == "gzip"
  194. assert dn2.read_kwargs is None
  195. assert dn2.write_kwargs is None
  196. assert dn2.exposed_type == "numpy"
  197. assert dn2.scope == Scope.GLOBAL
  198. assert dn2.validity_period == timedelta(2)
  199. # Config a datanode with specific "storage_type" = "parquet"
  200. # should use properties from the default datanode
  201. dn3 = Config.configure_data_node(
  202. id="dn3",
  203. storage_type="parquet",
  204. default_path="dn3.parquet",
  205. read_kwargs={"filter": "foo"},
  206. scope=Scope.SCENARIO,
  207. validity_period=timedelta(1),
  208. )
  209. assert dn3.storage_type == "parquet"
  210. assert dn3.default_path == "dn3.parquet"
  211. assert dn3.engine == "pyarrow"
  212. assert dn3.compression == "gzip"
  213. assert dn3.read_kwargs == {"filter": "foo"}
  214. assert dn3.write_kwargs is None
  215. assert dn3.exposed_type == "numpy"
  216. assert dn3.scope == Scope.SCENARIO
  217. assert dn3.validity_period == timedelta(1)
  218. def test_set_default_excel_data_node_configuration():
  219. Config.set_default_data_node_configuration(
  220. storage_type="excel",
  221. default_path="default.xlsx",
  222. has_header=False,
  223. exposed_type="numpy",
  224. scope=Scope.GLOBAL,
  225. validity_period=timedelta(2),
  226. )
  227. # Config with generic config_data_node without storage_type
  228. # should return the default DataNode
  229. dn1 = Config.configure_data_node(id="dn1")
  230. assert dn1.storage_type == "excel"
  231. assert dn1.scope == Scope.GLOBAL
  232. assert dn1.default_path == "default.xlsx"
  233. assert dn1.has_header is False
  234. assert dn1.sheet_name is None
  235. assert dn1.exposed_type == "numpy"
  236. assert dn1.validity_period == timedelta(2)
  237. # Config with generic config_data_node without storage_type
  238. # with custom properties
  239. dn2 = Config.configure_data_node(id="dn2", default_path="dn2.xlsx", sheet_name="sheet_1")
  240. assert dn2.storage_type == "excel"
  241. assert dn2.default_path == "dn2.xlsx"
  242. assert dn2.has_header is False
  243. assert dn2.sheet_name == "sheet_1"
  244. assert dn2.exposed_type == "numpy"
  245. assert dn2.scope == Scope.GLOBAL
  246. assert dn2.validity_period == timedelta(2)
  247. # Config a datanode with specific "storage_type" = "excel"
  248. # should use properties from the default datanode
  249. dn3 = Config.configure_data_node(
  250. id="dn3",
  251. storage_type="excel",
  252. default_path="dn3.xlsx",
  253. scope=Scope.SCENARIO,
  254. validity_period=timedelta(1),
  255. )
  256. assert dn3.storage_type == "excel"
  257. assert dn3.default_path == "dn3.xlsx"
  258. assert dn3.has_header is False
  259. assert dn3.sheet_name is None
  260. assert dn3.exposed_type == "numpy"
  261. assert dn3.scope == Scope.SCENARIO
  262. assert dn3.validity_period == timedelta(1)
  263. def test_set_default_pickle_data_node_configuration():
  264. Config.set_default_data_node_configuration(
  265. storage_type="pickle",
  266. default_data=1,
  267. exposed_type="numpy",
  268. scope=Scope.GLOBAL,
  269. validity_period=timedelta(2),
  270. )
  271. # Config with generic config_data_node without storage_type
  272. # should return the default DataNode
  273. dn1 = Config.configure_data_node(id="dn1")
  274. assert dn1.storage_type == "pickle"
  275. assert dn1.scope == Scope.GLOBAL
  276. assert dn1.default_path is None
  277. assert dn1.default_data == 1
  278. assert dn1.exposed_type == "numpy"
  279. assert dn1.validity_period == timedelta(2)
  280. # Config with generic config_data_node without storage_type
  281. # with custom properties
  282. dn2 = Config.configure_data_node(id="dn2", default_path="dn2.pkl", default_data=2)
  283. assert dn2.storage_type == "pickle"
  284. assert dn2.default_path == "dn2.pkl"
  285. assert dn2.default_data == 2
  286. assert dn2.exposed_type == "numpy"
  287. assert dn2.scope == Scope.GLOBAL
  288. assert dn2.validity_period == timedelta(2)
  289. # Config a datanode with specific "storage_type" = "pickle"
  290. # should use properties from the default datanode
  291. dn3 = Config.configure_data_node(
  292. id="dn3",
  293. storage_type="pickle",
  294. default_path="dn3.pkl",
  295. scope=Scope.SCENARIO,
  296. validity_period=timedelta(1),
  297. )
  298. assert dn3.storage_type == "pickle"
  299. assert dn3.default_path == "dn3.pkl"
  300. assert dn3.default_data == 1
  301. assert dn3.exposed_type == "numpy"
  302. assert dn3.scope == Scope.SCENARIO
  303. assert dn3.validity_period == timedelta(1)
  304. def test_set_default_sql_table_data_node_configuration():
  305. Config.set_default_data_node_configuration(
  306. storage_type="sql_table",
  307. db_username="default_user",
  308. db_password="default_pwd",
  309. db_name="default_db_name",
  310. db_engine="mssql",
  311. table_name="default_table",
  312. db_port=1010,
  313. db_host="default_host",
  314. db_driver="default server",
  315. db_extra_args={"default": "default"},
  316. scope=Scope.GLOBAL,
  317. validity_period=timedelta(2),
  318. )
  319. # Config with generic config_data_node without storage_type
  320. # should return the default DataNode
  321. dn1 = Config.configure_data_node(id="dn1")
  322. assert dn1.storage_type == "sql_table"
  323. assert dn1.db_username == "default_user"
  324. assert dn1.db_password == "default_pwd"
  325. assert dn1.db_name == "default_db_name"
  326. assert dn1.db_engine == "mssql"
  327. assert dn1.table_name == "default_table"
  328. assert dn1.db_port == 1010
  329. assert dn1.db_host == "default_host"
  330. assert dn1.db_driver == "default server"
  331. assert dn1.db_extra_args == {"default": "default"}
  332. assert dn1.scope == Scope.GLOBAL
  333. assert dn1.validity_period == timedelta(2)
  334. # Config with generic config_data_node without storage_type
  335. # with custom properties
  336. dn2 = Config.configure_data_node(
  337. id="dn2",
  338. table_name="table_2",
  339. db_port=2020,
  340. db_host="host_2",
  341. )
  342. assert dn2.storage_type == "sql_table"
  343. assert dn2.db_username == "default_user"
  344. assert dn2.db_password == "default_pwd"
  345. assert dn2.db_name == "default_db_name"
  346. assert dn2.db_engine == "mssql"
  347. assert dn2.table_name == "table_2"
  348. assert dn2.db_port == 2020
  349. assert dn2.db_host == "host_2"
  350. assert dn2.db_driver == "default server"
  351. assert dn2.db_extra_args == {"default": "default"}
  352. assert dn2.scope == Scope.GLOBAL
  353. assert dn2.validity_period == timedelta(2)
  354. # Config a datanode with specific "storage_type" = "sql_table"
  355. # should use properties from the default datanode
  356. dn3 = Config.configure_data_node(
  357. id="dn3",
  358. storage_type="sql_table",
  359. db_username="user_3",
  360. db_password="pwd_3",
  361. db_name="db_3",
  362. db_engine="postgresql",
  363. table_name="table_3",
  364. validity_period=timedelta(1),
  365. )
  366. assert dn3.storage_type == "sql_table"
  367. assert dn3.db_username == "user_3"
  368. assert dn3.db_password == "pwd_3"
  369. assert dn3.db_name == "db_3"
  370. assert dn3.db_engine == "postgresql"
  371. assert dn3.table_name == "table_3"
  372. assert dn3.db_port == 1010
  373. assert dn3.db_host == "default_host"
  374. assert dn3.db_driver == "default server"
  375. assert dn3.db_extra_args == {"default": "default"}
  376. assert dn3.scope == Scope.GLOBAL
  377. assert dn3.validity_period == timedelta(1)
  378. def test_set_default_sql_data_node_configuration():
  379. def query_builder(): ...
  380. Config.set_default_data_node_configuration(
  381. storage_type="sql",
  382. db_username="default_user",
  383. db_password="default_pwd",
  384. db_name="default_db_name",
  385. db_engine="mssql",
  386. read_query="SELECT * FROM default_table",
  387. write_query_builder=query_builder,
  388. append_query_builder=query_builder,
  389. db_port=1010,
  390. db_host="default_host",
  391. db_driver="default server",
  392. db_extra_args={"default": "default"},
  393. scope=Scope.GLOBAL,
  394. validity_period=timedelta(2),
  395. )
  396. # Config with generic config_data_node without storage_type
  397. # should return the default DataNode
  398. dn1 = Config.configure_data_node(id="dn1")
  399. assert dn1.storage_type == "sql"
  400. assert dn1.db_username == "default_user"
  401. assert dn1.db_password == "default_pwd"
  402. assert dn1.db_name == "default_db_name"
  403. assert dn1.db_engine == "mssql"
  404. assert dn1.read_query == "SELECT * FROM default_table"
  405. assert dn1.write_query_builder == query_builder
  406. assert dn1.append_query_builder == query_builder
  407. assert dn1.db_port == 1010
  408. assert dn1.db_host == "default_host"
  409. assert dn1.db_driver == "default server"
  410. assert dn1.db_extra_args == {"default": "default"}
  411. assert dn1.scope == Scope.GLOBAL
  412. assert dn1.validity_period == timedelta(2)
  413. # Config with generic config_data_node without storage_type
  414. # with custom properties
  415. dn2 = Config.configure_data_node(
  416. id="dn2", table_name="table_2", db_port=2020, db_host="host_2", read_query="SELECT * FROM table_2"
  417. )
  418. assert dn2.storage_type == "sql"
  419. assert dn2.db_username == "default_user"
  420. assert dn2.db_password == "default_pwd"
  421. assert dn2.db_name == "default_db_name"
  422. assert dn2.db_engine == "mssql"
  423. assert dn2.read_query == "SELECT * FROM table_2"
  424. assert dn2.write_query_builder == query_builder
  425. assert dn2.append_query_builder == query_builder
  426. assert dn2.db_port == 2020
  427. assert dn2.db_host == "host_2"
  428. assert dn2.db_driver == "default server"
  429. assert dn2.db_extra_args == {"default": "default"}
  430. assert dn2.scope == Scope.GLOBAL
  431. assert dn2.validity_period == timedelta(2)
  432. # Config a datanode with specific "storage_type" = "sql"
  433. # should use properties from the default datanode
  434. dn3 = Config.configure_data_node(
  435. id="dn3",
  436. storage_type="sql",
  437. db_username="user_3",
  438. db_password="pwd_3",
  439. db_name="db_3",
  440. db_engine="postgresql",
  441. read_query="SELECT * FROM table_3",
  442. write_query_builder=query_builder,
  443. validity_period=timedelta(1),
  444. )
  445. assert dn3.storage_type == "sql"
  446. assert dn3.db_username == "user_3"
  447. assert dn3.db_password == "pwd_3"
  448. assert dn3.db_name == "db_3"
  449. assert dn3.db_engine == "postgresql"
  450. assert dn3.read_query == "SELECT * FROM table_3"
  451. assert dn3.write_query_builder == query_builder
  452. assert dn3.append_query_builder == query_builder
  453. assert dn3.db_port == 1010
  454. assert dn3.db_host == "default_host"
  455. assert dn3.db_driver == "default server"
  456. assert dn3.db_extra_args == {"default": "default"}
  457. assert dn3.scope == Scope.GLOBAL
  458. assert dn3.validity_period == timedelta(1)
  459. def test_set_default_mongo_collection_data_node_configuration():
  460. Config.set_default_data_node_configuration(
  461. storage_type="mongo_collection",
  462. db_name="default_db_name",
  463. collection_name="default_collection",
  464. db_port=1010,
  465. db_host="default_host",
  466. db_driver="default server",
  467. db_extra_args={"default": "default"},
  468. scope=Scope.GLOBAL,
  469. validity_period=timedelta(2),
  470. )
  471. # Config with generic config_data_node without storage_type
  472. # should return the default DataNode
  473. dn1 = Config.configure_data_node(id="dn1")
  474. assert dn1.storage_type == "mongo_collection"
  475. assert dn1.db_username == ""
  476. assert dn1.db_password == ""
  477. assert dn1.db_name == "default_db_name"
  478. assert dn1.collection_name == "default_collection"
  479. assert dn1.custom_document == MongoDefaultDocument
  480. assert dn1.db_host == "default_host"
  481. assert dn1.db_port == 1010
  482. assert dn1.db_driver == "default server"
  483. assert dn1.db_extra_args == {"default": "default"}
  484. assert dn1.scope == Scope.GLOBAL
  485. assert dn1.validity_period == timedelta(2)
  486. # Config with generic config_data_node without storage_type
  487. # with custom properties
  488. dn2 = Config.configure_data_node(
  489. id="dn2",
  490. collection_name="collection_2",
  491. db_port=2020,
  492. db_host="host_2",
  493. )
  494. assert dn2.storage_type == "mongo_collection"
  495. assert dn2.db_username == ""
  496. assert dn2.db_password == ""
  497. assert dn2.db_name == "default_db_name"
  498. assert dn2.collection_name == "collection_2"
  499. assert dn2.custom_document == MongoDefaultDocument
  500. assert dn2.db_host == "host_2"
  501. assert dn2.db_port == 2020
  502. assert dn2.db_driver == "default server"
  503. assert dn2.db_extra_args == {"default": "default"}
  504. assert dn2.scope == Scope.GLOBAL
  505. assert dn2.validity_period == timedelta(2)
  506. # Config a datanode with specific "storage_type" = "mongo_collection"
  507. # should use properties from the default datanode
  508. dn3 = Config.configure_data_node(
  509. id="dn3",
  510. storage_type="mongo_collection",
  511. db_name="db_3",
  512. collection_name="collection_3",
  513. db_username="user_3",
  514. db_password="pwd_3",
  515. validity_period=timedelta(1),
  516. )
  517. assert dn3.storage_type == "mongo_collection"
  518. assert dn3.db_username == "user_3"
  519. assert dn3.db_password == "pwd_3"
  520. assert dn3.db_name == "db_3"
  521. assert dn3.collection_name == "collection_3"
  522. assert dn3.custom_document == MongoDefaultDocument
  523. assert dn3.db_port == 1010
  524. assert dn3.db_host == "default_host"
  525. assert dn3.db_driver == "default server"
  526. assert dn3.db_extra_args == {"default": "default"}
  527. assert dn3.scope == Scope.GLOBAL
  528. assert dn3.validity_period == timedelta(1)
  529. def test_set_default_s3_object_data_node_configuration():
  530. Config.set_default_data_node_configuration(
  531. storage_type="s3_object",
  532. aws_access_key="default_access_key",
  533. aws_secret_access_key="default_secret_acces_key",
  534. aws_s3_bucket_name="default_bucket_name",
  535. aws_s3_object_key="default_object_key",
  536. aws_region="",
  537. aws_s3_object_parameters={"default": "default"},
  538. scope=Scope.GLOBAL,
  539. validity_period=timedelta(2),
  540. )
  541. # Config with generic config_data_node without storage_type
  542. # should return the default DataNode
  543. dn1 = Config.configure_data_node(id="dn1")
  544. assert dn1.storage_type == "s3_object"
  545. assert dn1.aws_access_key == "default_access_key"
  546. assert dn1.aws_secret_access_key == "default_secret_acces_key"
  547. assert dn1.aws_s3_bucket_name == "default_bucket_name"
  548. assert dn1.aws_s3_object_key == "default_object_key"
  549. assert dn1.aws_region == ""
  550. assert dn1.aws_s3_object_parameters == {"default": "default"}
  551. assert dn1.scope == Scope.GLOBAL
  552. assert dn1.validity_period == timedelta(2)
  553. # Config with generic config_data_node without storage_type
  554. # with custom properties
  555. dn2 = Config.configure_data_node(
  556. id="dn2",
  557. aws_access_key="custom_access_key_2",
  558. aws_secret_access_key="custom_secret_acces_key_2",
  559. aws_s3_bucket_name="custom_bucket_name_2",
  560. aws_s3_object_key="custom_object_key_2",
  561. )
  562. assert dn2.storage_type == "s3_object"
  563. assert dn2.aws_access_key == "custom_access_key_2"
  564. assert dn2.aws_secret_access_key == "custom_secret_acces_key_2"
  565. assert dn2.aws_s3_bucket_name == "custom_bucket_name_2"
  566. assert dn2.aws_s3_object_key == "custom_object_key_2"
  567. assert dn2.aws_region == ""
  568. assert dn2.aws_s3_object_parameters == {"default": "default"}
  569. assert dn2.scope == Scope.GLOBAL
  570. assert dn2.validity_period == timedelta(2)
  571. # Config a datanode with specific "storage_type" = "s3_object"
  572. # should use properties from the default datanode
  573. dn3 = Config.configure_data_node(
  574. id="dn3",
  575. storage_type="s3_object",
  576. aws_access_key="custom_access_key_3",
  577. aws_secret_access_key="custom_secret_acces_key_3",
  578. aws_s3_bucket_name="custom_bucket_name_3",
  579. aws_s3_object_key="custom_object_key_3",
  580. aws_region="",
  581. aws_s3_object_parameters={"default": "default"},
  582. scope=Scope.GLOBAL,
  583. validity_period=timedelta(1),
  584. )
  585. assert dn3.storage_type == "s3_object"
  586. assert dn3.aws_access_key == "custom_access_key_3"
  587. assert dn3.aws_secret_access_key == "custom_secret_acces_key_3"
  588. assert dn3.aws_s3_bucket_name == "custom_bucket_name_3"
  589. assert dn3.aws_s3_object_key == "custom_object_key_3"
  590. assert dn3.aws_region == ""
  591. assert dn3.aws_s3_object_parameters == {"default": "default"}
  592. assert dn3.scope == Scope.GLOBAL
  593. assert dn3.validity_period == timedelta(1)