# Copyright 2021-2024 Avaiga Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import json
from datetime import timedelta

import pytest

from taipy.common.config import Config
from taipy.common.config.common.scope import Scope
from taipy.core.common.mongo_default_document import MongoDefaultDocument
  17. def test_set_default_data_node_configuration():
  18. data_node1 = Config.configure_data_node(id="input_data1")
  19. assert data_node1.storage_type == "pickle"
  20. assert data_node1.scope == Scope.SCENARIO
  21. assert data_node1.validity_period is None
  22. Config.set_default_data_node_configuration("in_memory", scope=Scope.GLOBAL)
  23. data_node2 = Config.configure_data_node(id="input_data2")
  24. assert data_node2.storage_type == "in_memory"
  25. assert data_node2.scope == Scope.GLOBAL
  26. assert data_node2.validity_period is None
  27. Config.set_default_data_node_configuration("csv")
  28. data_node3 = Config.configure_data_node(id="input_data3")
  29. assert data_node3.storage_type == "csv"
  30. assert data_node3.scope == Scope.SCENARIO
  31. assert data_node3.validity_period is None
  32. Config.set_default_data_node_configuration("json", validity_period=timedelta(1))
  33. data_node4 = Config.configure_data_node(id="input_data4")
  34. assert data_node4.storage_type == "json"
  35. assert data_node4.scope == Scope.SCENARIO
  36. assert data_node4.validity_period == timedelta(1)
  37. Config.set_default_data_node_configuration("s3_object", validity_period=timedelta(1))
  38. data_node5 = Config.configure_data_node(id="input_data5")
  39. assert data_node5.storage_type == "s3_object"
  40. assert data_node5.scope == Scope.SCENARIO
  41. assert data_node5.validity_period == timedelta(1)
  42. def test_set_default_data_node_configuration_replace_old_default_config():
  43. Config.set_default_data_node_configuration(
  44. "in_memory",
  45. prop1="1",
  46. prop2="2",
  47. prop3="3",
  48. )
  49. dn1 = Config.configure_data_node(id="dn1")
  50. assert len(dn1.properties) == 3
  51. Config.set_default_data_node_configuration(
  52. "csv",
  53. prop4="4",
  54. prop5="5",
  55. prop6="6",
  56. )
  57. dn2 = Config.configure_data_node(id="dn2")
  58. assert dn2.storage_type == "csv"
  59. assert len(dn2.properties) == 6 # encoding, exposed_type, and has_header too
  60. assert dn2.prop4 == "4"
  61. assert dn2.prop5 == "5"
  62. assert dn2.prop6 == "6"
  63. assert dn2.prop1 is None
  64. assert dn2.prop2 is None
  65. assert dn2.prop3 is None
  66. def test_config_storage_type_different_from_default_data_node():
  67. Config.set_default_data_node_configuration(
  68. storage_type="pickle",
  69. custom_property={"foo": "bar"},
  70. scope=Scope.GLOBAL,
  71. )
  72. # Config a datanode with specific "storage_type" different than "pickle"
  73. # should ignore the default datanode
  74. csv_dn = Config.configure_data_node(id="csv_dn", storage_type="csv")
  75. assert len(csv_dn.properties) == 3 # encoding, exposed_type, and has_header
  76. assert csv_dn.properties.get("custom_property") is None
  77. assert csv_dn.scope == Scope.SCENARIO
  78. def test_set_default_csv_data_node_configuration():
  79. Config.set_default_data_node_configuration(
  80. storage_type="csv",
  81. default_path="default.csv",
  82. has_header=False,
  83. exposed_type="numpy",
  84. scope=Scope.GLOBAL,
  85. validity_period=timedelta(2),
  86. )
  87. # Config with generic config_data_node without storage_type
  88. # should return the default DataNode
  89. dn1 = Config.configure_data_node(id="dn1")
  90. assert dn1.storage_type == "csv"
  91. assert dn1.scope == Scope.GLOBAL
  92. assert dn1.default_path == "default.csv"
  93. assert dn1.has_header is False
  94. assert dn1.exposed_type == "numpy"
  95. assert dn1.validity_period == timedelta(2)
  96. # Config with generic config_data_node without storage_type
  97. # with custom properties
  98. dn2 = Config.configure_data_node(id="dn2", default_path="dn2.csv")
  99. assert dn2.storage_type == "csv"
  100. assert dn2.default_path == "dn2.csv"
  101. assert dn2.has_header is False
  102. assert dn2.exposed_type == "numpy"
  103. assert dn2.scope == Scope.GLOBAL
  104. assert dn2.validity_period == timedelta(2)
  105. # Config a datanode with specific "storage_type" = "csv"
  106. # should use properties from the default datanode
  107. dn3 = Config.configure_data_node(
  108. id="dn3",
  109. storage_type="csv",
  110. default_path="dn3.csv",
  111. scope=Scope.SCENARIO,
  112. validity_period=timedelta(1),
  113. )
  114. assert dn3.storage_type == "csv"
  115. assert dn3.default_path == "dn3.csv"
  116. assert dn3.has_header is False
  117. assert dn3.exposed_type == "numpy"
  118. assert dn3.scope == Scope.SCENARIO
  119. assert dn3.validity_period == timedelta(1)
  120. def test_set_default_json_data_node_configuration():
  121. class MyCustomEncoder(json.JSONEncoder): ...
  122. class MyCustomDecoder(json.JSONDecoder): ...
  123. Config.set_default_data_node_configuration(
  124. storage_type="json",
  125. default_path="default.json",
  126. encoder=MyCustomEncoder,
  127. scope=Scope.GLOBAL,
  128. validity_period=timedelta(2),
  129. )
  130. # Config with generic config_data_node without storage_type
  131. # should return the default DataNode
  132. dn1 = Config.configure_data_node(id="dn1")
  133. assert dn1.storage_type == "json"
  134. assert dn1.default_path == "default.json"
  135. assert dn1.encoder == MyCustomEncoder
  136. assert dn1.decoder is None
  137. assert dn1.scope == Scope.GLOBAL
  138. assert dn1.validity_period == timedelta(2)
  139. # Config with generic config_data_node without storage_type
  140. # with custom properties
  141. dn2 = Config.configure_data_node(id="dn2", default_path="dn2.json")
  142. assert dn2.storage_type == "json"
  143. assert dn2.default_path == "dn2.json"
  144. assert dn2.encoder == MyCustomEncoder
  145. assert dn2.decoder is None
  146. assert dn2.scope == Scope.GLOBAL
  147. assert dn2.validity_period == timedelta(2)
  148. # Config a datanode with specific "storage_type" = "json"
  149. # should use properties from the default datanode
  150. dn3 = Config.configure_data_node(
  151. id="dn3",
  152. storage_type="json",
  153. default_path="dn3.json",
  154. decoder=MyCustomDecoder,
  155. validity_period=timedelta(1),
  156. )
  157. assert dn3.storage_type == "json"
  158. assert dn3.default_path == "dn3.json"
  159. assert dn3.encoder == MyCustomEncoder
  160. assert dn3.decoder == MyCustomDecoder
  161. assert dn3.scope == Scope.GLOBAL
  162. assert dn3.validity_period == timedelta(1)
  163. @pytest.mark.skip(reason="Parquet not available in this version")
  164. def test_set_default_parquet_data_node_configuration():
  165. Config.set_default_data_node_configuration(
  166. storage_type="parquet",
  167. default_path="default.parquet",
  168. compression="gzip",
  169. exposed_type="numpy",
  170. scope=Scope.GLOBAL,
  171. validity_period=timedelta(2),
  172. )
  173. # Config with generic config_data_node without storage_type
  174. # should return the default DataNode
  175. dn1 = Config.configure_data_node(id="dn1")
  176. assert dn1.storage_type == "parquet"
  177. assert dn1.default_path == "default.parquet"
  178. assert dn1.engine == "pyarrow"
  179. assert dn1.compression == "gzip"
  180. assert dn1.read_kwargs is None
  181. assert dn1.write_kwargs is None
  182. assert dn1.exposed_type == "numpy"
  183. assert dn1.scope == Scope.GLOBAL
  184. assert dn1.validity_period == timedelta(2)
  185. # Config with generic config_data_node without storage_type
  186. # with custom properties
  187. dn2 = Config.configure_data_node(
  188. id="dn2",
  189. default_path="dn2.parquet",
  190. engine="fastparquet",
  191. )
  192. assert dn2.storage_type == "parquet"
  193. assert dn2.default_path == "dn2.parquet"
  194. assert dn2.engine == "fastparquet"
  195. assert dn2.compression == "gzip"
  196. assert dn2.read_kwargs is None
  197. assert dn2.write_kwargs is None
  198. assert dn2.exposed_type == "numpy"
  199. assert dn2.scope == Scope.GLOBAL
  200. assert dn2.validity_period == timedelta(2)
  201. # Config a datanode with specific "storage_type" = "parquet"
  202. # should use properties from the default datanode
  203. dn3 = Config.configure_data_node(
  204. id="dn3",
  205. storage_type="parquet",
  206. default_path="dn3.parquet",
  207. read_kwargs={"filter": "foo"},
  208. scope=Scope.SCENARIO,
  209. validity_period=timedelta(1),
  210. )
  211. assert dn3.storage_type == "parquet"
  212. assert dn3.default_path == "dn3.parquet"
  213. assert dn3.engine == "pyarrow"
  214. assert dn3.compression == "gzip"
  215. assert dn3.read_kwargs == {"filter": "foo"}
  216. assert dn3.write_kwargs is None
  217. assert dn3.exposed_type == "numpy"
  218. assert dn3.scope == Scope.SCENARIO
  219. assert dn3.validity_period == timedelta(1)
  220. def test_set_default_excel_data_node_configuration():
  221. Config.set_default_data_node_configuration(
  222. storage_type="excel",
  223. default_path="default.xlsx",
  224. has_header=False,
  225. exposed_type="numpy",
  226. scope=Scope.GLOBAL,
  227. validity_period=timedelta(2),
  228. )
  229. # Config with generic config_data_node without storage_type
  230. # should return the default DataNode
  231. dn1 = Config.configure_data_node(id="dn1")
  232. assert dn1.storage_type == "excel"
  233. assert dn1.scope == Scope.GLOBAL
  234. assert dn1.default_path == "default.xlsx"
  235. assert dn1.has_header is False
  236. assert dn1.sheet_name is None
  237. assert dn1.exposed_type == "numpy"
  238. assert dn1.validity_period == timedelta(2)
  239. # Config with generic config_data_node without storage_type
  240. # with custom properties
  241. dn2 = Config.configure_data_node(id="dn2", default_path="dn2.xlsx", sheet_name="sheet_1")
  242. assert dn2.storage_type == "excel"
  243. assert dn2.default_path == "dn2.xlsx"
  244. assert dn2.has_header is False
  245. assert dn2.sheet_name == "sheet_1"
  246. assert dn2.exposed_type == "numpy"
  247. assert dn2.scope == Scope.GLOBAL
  248. assert dn2.validity_period == timedelta(2)
  249. # Config a datanode with specific "storage_type" = "excel"
  250. # should use properties from the default datanode
  251. dn3 = Config.configure_data_node(
  252. id="dn3",
  253. storage_type="excel",
  254. default_path="dn3.xlsx",
  255. scope=Scope.SCENARIO,
  256. validity_period=timedelta(1),
  257. )
  258. assert dn3.storage_type == "excel"
  259. assert dn3.default_path == "dn3.xlsx"
  260. assert dn3.has_header is False
  261. assert dn3.sheet_name is None
  262. assert dn3.exposed_type == "numpy"
  263. assert dn3.scope == Scope.SCENARIO
  264. assert dn3.validity_period == timedelta(1)
  265. def test_set_default_pickle_data_node_configuration():
  266. Config.set_default_data_node_configuration(
  267. storage_type="pickle",
  268. default_data=1,
  269. exposed_type="numpy",
  270. scope=Scope.GLOBAL,
  271. validity_period=timedelta(2),
  272. )
  273. # Config with generic config_data_node without storage_type
  274. # should return the default DataNode
  275. dn1 = Config.configure_data_node(id="dn1")
  276. assert dn1.storage_type == "pickle"
  277. assert dn1.scope == Scope.GLOBAL
  278. assert dn1.default_path is None
  279. assert dn1.default_data == 1
  280. assert dn1.exposed_type == "numpy"
  281. assert dn1.validity_period == timedelta(2)
  282. # Config with generic config_data_node without storage_type
  283. # with custom properties
  284. dn2 = Config.configure_data_node(id="dn2", default_path="dn2.pkl", default_data=2)
  285. assert dn2.storage_type == "pickle"
  286. assert dn2.default_path == "dn2.pkl"
  287. assert dn2.default_data == 2
  288. assert dn2.exposed_type == "numpy"
  289. assert dn2.scope == Scope.GLOBAL
  290. assert dn2.validity_period == timedelta(2)
  291. # Config a datanode with specific "storage_type" = "pickle"
  292. # should use properties from the default datanode
  293. dn3 = Config.configure_data_node(
  294. id="dn3",
  295. storage_type="pickle",
  296. default_path="dn3.pkl",
  297. scope=Scope.SCENARIO,
  298. validity_period=timedelta(1),
  299. )
  300. assert dn3.storage_type == "pickle"
  301. assert dn3.default_path == "dn3.pkl"
  302. assert dn3.default_data == 1
  303. assert dn3.exposed_type == "numpy"
  304. assert dn3.scope == Scope.SCENARIO
  305. assert dn3.validity_period == timedelta(1)
  306. def test_set_default_sql_table_data_node_configuration():
  307. Config.set_default_data_node_configuration(
  308. storage_type="sql_table",
  309. db_username="default_user",
  310. db_password="default_pwd",
  311. db_name="default_db_name",
  312. db_engine="mssql",
  313. table_name="default_table",
  314. db_port=1010,
  315. db_host="default_host",
  316. db_driver="default server",
  317. db_extra_args={"default": "default"},
  318. scope=Scope.GLOBAL,
  319. validity_period=timedelta(2),
  320. )
  321. # Config with generic config_data_node without storage_type
  322. # should return the default DataNode
  323. dn1 = Config.configure_data_node(id="dn1")
  324. assert dn1.storage_type == "sql_table"
  325. assert dn1.db_username == "default_user"
  326. assert dn1.db_password == "default_pwd"
  327. assert dn1.db_name == "default_db_name"
  328. assert dn1.db_engine == "mssql"
  329. assert dn1.table_name == "default_table"
  330. assert dn1.db_port == 1010
  331. assert dn1.db_host == "default_host"
  332. assert dn1.db_driver == "default server"
  333. assert dn1.db_extra_args == {"default": "default"}
  334. assert dn1.scope == Scope.GLOBAL
  335. assert dn1.validity_period == timedelta(2)
  336. # Config with generic config_data_node without storage_type
  337. # with custom properties
  338. dn2 = Config.configure_data_node(
  339. id="dn2",
  340. table_name="table_2",
  341. db_port=2020,
  342. db_host="host_2",
  343. )
  344. assert dn2.storage_type == "sql_table"
  345. assert dn2.db_username == "default_user"
  346. assert dn2.db_password == "default_pwd"
  347. assert dn2.db_name == "default_db_name"
  348. assert dn2.db_engine == "mssql"
  349. assert dn2.table_name == "table_2"
  350. assert dn2.db_port == 2020
  351. assert dn2.db_host == "host_2"
  352. assert dn2.db_driver == "default server"
  353. assert dn2.db_extra_args == {"default": "default"}
  354. assert dn2.scope == Scope.GLOBAL
  355. assert dn2.validity_period == timedelta(2)
  356. # Config a datanode with specific "storage_type" = "sql_table"
  357. # should use properties from the default datanode
  358. dn3 = Config.configure_data_node(
  359. id="dn3",
  360. storage_type="sql_table",
  361. db_username="user_3",
  362. db_password="pwd_3",
  363. db_name="db_3",
  364. db_engine="postgresql",
  365. table_name="table_3",
  366. validity_period=timedelta(1),
  367. )
  368. assert dn3.storage_type == "sql_table"
  369. assert dn3.db_username == "user_3"
  370. assert dn3.db_password == "pwd_3"
  371. assert dn3.db_name == "db_3"
  372. assert dn3.db_engine == "postgresql"
  373. assert dn3.table_name == "table_3"
  374. assert dn3.db_port == 1010
  375. assert dn3.db_host == "default_host"
  376. assert dn3.db_driver == "default server"
  377. assert dn3.db_extra_args == {"default": "default"}
  378. assert dn3.scope == Scope.GLOBAL
  379. assert dn3.validity_period == timedelta(1)
  380. def test_set_default_sql_data_node_configuration():
  381. def query_builder(): ...
  382. Config.set_default_data_node_configuration(
  383. storage_type="sql",
  384. db_username="default_user",
  385. db_password="default_pwd",
  386. db_name="default_db_name",
  387. db_engine="mssql",
  388. read_query="SELECT * FROM default_table",
  389. write_query_builder=query_builder,
  390. append_query_builder=query_builder,
  391. db_port=1010,
  392. db_host="default_host",
  393. db_driver="default server",
  394. db_extra_args={"default": "default"},
  395. scope=Scope.GLOBAL,
  396. validity_period=timedelta(2),
  397. )
  398. # Config with generic config_data_node without storage_type
  399. # should return the default DataNode
  400. dn1 = Config.configure_data_node(id="dn1")
  401. assert dn1.storage_type == "sql"
  402. assert dn1.db_username == "default_user"
  403. assert dn1.db_password == "default_pwd"
  404. assert dn1.db_name == "default_db_name"
  405. assert dn1.db_engine == "mssql"
  406. assert dn1.read_query == "SELECT * FROM default_table"
  407. assert dn1.write_query_builder == query_builder
  408. assert dn1.append_query_builder == query_builder
  409. assert dn1.db_port == 1010
  410. assert dn1.db_host == "default_host"
  411. assert dn1.db_driver == "default server"
  412. assert dn1.db_extra_args == {"default": "default"}
  413. assert dn1.scope == Scope.GLOBAL
  414. assert dn1.validity_period == timedelta(2)
  415. # Config with generic config_data_node without storage_type
  416. # with custom properties
  417. dn2 = Config.configure_data_node(
  418. id="dn2", table_name="table_2", db_port=2020, db_host="host_2", read_query="SELECT * FROM table_2"
  419. )
  420. assert dn2.storage_type == "sql"
  421. assert dn2.db_username == "default_user"
  422. assert dn2.db_password == "default_pwd"
  423. assert dn2.db_name == "default_db_name"
  424. assert dn2.db_engine == "mssql"
  425. assert dn2.read_query == "SELECT * FROM table_2"
  426. assert dn2.write_query_builder == query_builder
  427. assert dn2.append_query_builder == query_builder
  428. assert dn2.db_port == 2020
  429. assert dn2.db_host == "host_2"
  430. assert dn2.db_driver == "default server"
  431. assert dn2.db_extra_args == {"default": "default"}
  432. assert dn2.scope == Scope.GLOBAL
  433. assert dn2.validity_period == timedelta(2)
  434. # Config a datanode with specific "storage_type" = "sql"
  435. # should use properties from the default datanode
  436. dn3 = Config.configure_data_node(
  437. id="dn3",
  438. storage_type="sql",
  439. db_username="user_3",
  440. db_password="pwd_3",
  441. db_name="db_3",
  442. db_engine="postgresql",
  443. read_query="SELECT * FROM table_3",
  444. write_query_builder=query_builder,
  445. validity_period=timedelta(1),
  446. )
  447. assert dn3.storage_type == "sql"
  448. assert dn3.db_username == "user_3"
  449. assert dn3.db_password == "pwd_3"
  450. assert dn3.db_name == "db_3"
  451. assert dn3.db_engine == "postgresql"
  452. assert dn3.read_query == "SELECT * FROM table_3"
  453. assert dn3.write_query_builder == query_builder
  454. assert dn3.append_query_builder == query_builder
  455. assert dn3.db_port == 1010
  456. assert dn3.db_host == "default_host"
  457. assert dn3.db_driver == "default server"
  458. assert dn3.db_extra_args == {"default": "default"}
  459. assert dn3.scope == Scope.GLOBAL
  460. assert dn3.validity_period == timedelta(1)
  461. def test_set_default_mongo_collection_data_node_configuration():
  462. Config.set_default_data_node_configuration(
  463. storage_type="mongo_collection",
  464. db_name="default_db_name",
  465. collection_name="default_collection",
  466. db_port=1010,
  467. db_host="default_host",
  468. db_driver="default server",
  469. db_extra_args={"default": "default"},
  470. scope=Scope.GLOBAL,
  471. validity_period=timedelta(2),
  472. )
  473. # Config with generic config_data_node without storage_type
  474. # should return the default DataNode
  475. dn1 = Config.configure_data_node(id="dn1")
  476. assert dn1.storage_type == "mongo_collection"
  477. assert dn1.db_username == ""
  478. assert dn1.db_password == ""
  479. assert dn1.db_name == "default_db_name"
  480. assert dn1.collection_name == "default_collection"
  481. assert dn1.custom_document == MongoDefaultDocument
  482. assert dn1.db_host == "default_host"
  483. assert dn1.db_port == 1010
  484. assert dn1.db_driver == "default server"
  485. assert dn1.db_extra_args == {"default": "default"}
  486. assert dn1.scope == Scope.GLOBAL
  487. assert dn1.validity_period == timedelta(2)
  488. # Config with generic config_data_node without storage_type
  489. # with custom properties
  490. dn2 = Config.configure_data_node(
  491. id="dn2",
  492. collection_name="collection_2",
  493. db_port=2020,
  494. db_host="host_2",
  495. )
  496. assert dn2.storage_type == "mongo_collection"
  497. assert dn2.db_username == ""
  498. assert dn2.db_password == ""
  499. assert dn2.db_name == "default_db_name"
  500. assert dn2.collection_name == "collection_2"
  501. assert dn2.custom_document == MongoDefaultDocument
  502. assert dn2.db_host == "host_2"
  503. assert dn2.db_port == 2020
  504. assert dn2.db_driver == "default server"
  505. assert dn2.db_extra_args == {"default": "default"}
  506. assert dn2.scope == Scope.GLOBAL
  507. assert dn2.validity_period == timedelta(2)
  508. # Config a datanode with specific "storage_type" = "mongo_collection"
  509. # should use properties from the default datanode
  510. dn3 = Config.configure_data_node(
  511. id="dn3",
  512. storage_type="mongo_collection",
  513. db_name="db_3",
  514. collection_name="collection_3",
  515. db_username="user_3",
  516. db_password="pwd_3",
  517. validity_period=timedelta(1),
  518. )
  519. assert dn3.storage_type == "mongo_collection"
  520. assert dn3.db_username == "user_3"
  521. assert dn3.db_password == "pwd_3"
  522. assert dn3.db_name == "db_3"
  523. assert dn3.collection_name == "collection_3"
  524. assert dn3.custom_document == MongoDefaultDocument
  525. assert dn3.db_port == 1010
  526. assert dn3.db_host == "default_host"
  527. assert dn3.db_driver == "default server"
  528. assert dn3.db_extra_args == {"default": "default"}
  529. assert dn3.scope == Scope.GLOBAL
  530. assert dn3.validity_period == timedelta(1)
  531. def test_set_default_s3_object_data_node_configuration():
  532. Config.set_default_data_node_configuration(
  533. storage_type="s3_object",
  534. aws_access_key="default_access_key",
  535. aws_secret_access_key="default_secret_acces_key",
  536. aws_s3_bucket_name="default_bucket_name",
  537. aws_s3_object_key="default_object_key",
  538. aws_region="",
  539. aws_s3_object_parameters={"default": "default"},
  540. scope=Scope.GLOBAL,
  541. validity_period=timedelta(2),
  542. )
  543. # Config with generic config_data_node without storage_type
  544. # should return the default DataNode
  545. dn1 = Config.configure_data_node(id="dn1")
  546. assert dn1.storage_type == "s3_object"
  547. assert dn1.aws_access_key == "default_access_key"
  548. assert dn1.aws_secret_access_key == "default_secret_acces_key"
  549. assert dn1.aws_s3_bucket_name == "default_bucket_name"
  550. assert dn1.aws_s3_object_key == "default_object_key"
  551. assert dn1.aws_region == ""
  552. assert dn1.aws_s3_object_parameters == {"default": "default"}
  553. assert dn1.scope == Scope.GLOBAL
  554. assert dn1.validity_period == timedelta(2)
  555. # Config with generic config_data_node without storage_type
  556. # with custom properties
  557. dn2 = Config.configure_data_node(
  558. id="dn2",
  559. aws_access_key="custom_access_key_2",
  560. aws_secret_access_key="custom_secret_acces_key_2",
  561. aws_s3_bucket_name="custom_bucket_name_2",
  562. aws_s3_object_key="custom_object_key_2",
  563. )
  564. assert dn2.storage_type == "s3_object"
  565. assert dn2.aws_access_key == "custom_access_key_2"
  566. assert dn2.aws_secret_access_key == "custom_secret_acces_key_2"
  567. assert dn2.aws_s3_bucket_name == "custom_bucket_name_2"
  568. assert dn2.aws_s3_object_key == "custom_object_key_2"
  569. assert dn2.aws_region == ""
  570. assert dn2.aws_s3_object_parameters == {"default": "default"}
  571. assert dn2.scope == Scope.GLOBAL
  572. assert dn2.validity_period == timedelta(2)
  573. # Config a datanode with specific "storage_type" = "s3_object"
  574. # should use properties from the default datanode
  575. dn3 = Config.configure_data_node(
  576. id="dn3",
  577. storage_type="s3_object",
  578. aws_access_key="custom_access_key_3",
  579. aws_secret_access_key="custom_secret_acces_key_3",
  580. aws_s3_bucket_name="custom_bucket_name_3",
  581. aws_s3_object_key="custom_object_key_3",
  582. aws_region="",
  583. aws_s3_object_parameters={"default": "default"},
  584. scope=Scope.GLOBAL,
  585. validity_period=timedelta(1),
  586. )
  587. assert dn3.storage_type == "s3_object"
  588. assert dn3.aws_access_key == "custom_access_key_3"
  589. assert dn3.aws_secret_access_key == "custom_secret_acces_key_3"
  590. assert dn3.aws_s3_bucket_name == "custom_bucket_name_3"
  591. assert dn3.aws_s3_object_key == "custom_object_key_3"
  592. assert dn3.aws_region == ""
  593. assert dn3.aws_s3_object_parameters == {"default": "default"}
  594. assert dn3.scope == Scope.GLOBAL
  595. assert dn3.validity_period == timedelta(1)