# test_configure_default_config.py
  1. # Copyright 2023 Avaiga Private Limited
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
  4. # the License. You may obtain a copy of the License at
  5. #
  6. # http://www.apache.org/licenses/LICENSE-2.0
  7. #
  8. # Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
  9. # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
  10. # specific language governing permissions and limitations under the License.
  11. import json
  12. from datetime import timedelta
  13. from taipy.config.common.scope import Scope
  14. from taipy.config.config import Config
  15. from taipy.core.common.mongo_default_document import MongoDefaultDocument
  16. def test_set_default_data_node_configuration():
  17. data_node1 = Config.configure_data_node(id="input_data1")
  18. assert data_node1.storage_type == "pickle"
  19. assert data_node1.scope == Scope.SCENARIO
  20. assert data_node1.validity_period is None
  21. Config.set_default_data_node_configuration("in_memory", scope=Scope.GLOBAL)
  22. data_node2 = Config.configure_data_node(id="input_data2")
  23. assert data_node2.storage_type == "in_memory"
  24. assert data_node2.scope == Scope.GLOBAL
  25. assert data_node2.validity_period is None
  26. Config.set_default_data_node_configuration("csv")
  27. data_node3 = Config.configure_data_node(id="input_data3")
  28. assert data_node3.storage_type == "csv"
  29. assert data_node3.scope == Scope.SCENARIO
  30. assert data_node3.validity_period is None
  31. Config.set_default_data_node_configuration("json", validity_period=timedelta(1))
  32. data_node4 = Config.configure_data_node(id="input_data4")
  33. assert data_node4.storage_type == "json"
  34. assert data_node4.scope == Scope.SCENARIO
  35. assert data_node4.validity_period == timedelta(1)
  36. Config.set_default_data_node_configuration("s3_object", validity_period=timedelta(1))
  37. data_node5 = Config.configure_data_node(id="input_data5")
  38. assert data_node5.storage_type == "s3_object"
  39. assert data_node5.scope == Scope.SCENARIO
  40. assert data_node5.validity_period == timedelta(1)
  41. def test_set_default_data_node_configuration_replace_old_default_config():
  42. Config.set_default_data_node_configuration(
  43. "in_memory",
  44. prop1="1",
  45. prop2="2",
  46. prop3="3",
  47. )
  48. dn1 = Config.configure_data_node(id="dn1")
  49. assert len(dn1.properties) == 3
  50. Config.set_default_data_node_configuration(
  51. "csv",
  52. prop4="4",
  53. prop5="5",
  54. prop6="6",
  55. )
  56. dn2 = Config.configure_data_node(id="dn2")
  57. assert dn2.storage_type == "csv"
  58. assert len(dn2.properties) == 6 # encoding, exposed_type, and has_header too
  59. assert dn2.prop4 == "4"
  60. assert dn2.prop5 == "5"
  61. assert dn2.prop6 == "6"
  62. assert dn2.prop1 is None
  63. assert dn2.prop2 is None
  64. assert dn2.prop3 is None
  65. def test_config_storage_type_different_from_default_data_node():
  66. Config.set_default_data_node_configuration(
  67. storage_type="pickle",
  68. custom_property={"foo": "bar"},
  69. scope=Scope.GLOBAL,
  70. )
  71. # Config a datanode with specific "storage_type" different than "pickle"
  72. # should ignore the default datanode
  73. csv_dn = Config.configure_data_node(id="csv_dn", storage_type="csv")
  74. assert len(csv_dn.properties) == 3 # encoding, exposed_type, and has_header
  75. assert csv_dn.properties.get("custom_property") is None
  76. assert csv_dn.scope == Scope.SCENARIO
  77. def test_set_default_csv_data_node_configuration():
  78. Config.set_default_data_node_configuration(
  79. storage_type="csv",
  80. default_path="default.csv",
  81. has_header=False,
  82. exposed_type="numpy",
  83. scope=Scope.GLOBAL,
  84. validity_period=timedelta(2),
  85. )
  86. # Config with generic config_data_node without storage_type
  87. # should return the default DataNode
  88. dn1 = Config.configure_data_node(id="dn1")
  89. assert dn1.storage_type == "csv"
  90. assert dn1.scope == Scope.GLOBAL
  91. assert dn1.default_path == "default.csv"
  92. assert dn1.has_header is False
  93. assert dn1.exposed_type == "numpy"
  94. assert dn1.validity_period == timedelta(2)
  95. # Config with generic config_data_node without storage_type
  96. # with custom properties
  97. dn2 = Config.configure_data_node(id="dn2", default_path="dn2.csv")
  98. assert dn2.storage_type == "csv"
  99. assert dn2.default_path == "dn2.csv"
  100. assert dn2.has_header is False
  101. assert dn2.exposed_type == "numpy"
  102. assert dn2.scope == Scope.GLOBAL
  103. assert dn2.validity_period == timedelta(2)
  104. # Config a datanode with specific "storage_type" = "csv"
  105. # should use properties from the default datanode
  106. dn3 = Config.configure_data_node(
  107. id="dn3",
  108. storage_type="csv",
  109. default_path="dn3.csv",
  110. scope=Scope.SCENARIO,
  111. validity_period=timedelta(1),
  112. )
  113. assert dn3.storage_type == "csv"
  114. assert dn3.default_path == "dn3.csv"
  115. assert dn3.has_header is False
  116. assert dn3.exposed_type == "numpy"
  117. assert dn3.scope == Scope.SCENARIO
  118. assert dn3.validity_period == timedelta(1)
  119. def test_set_default_json_data_node_configuration():
  120. class MyCustomEncoder(json.JSONEncoder):
  121. ...
  122. class MyCustomDecoder(json.JSONDecoder):
  123. ...
  124. Config.set_default_data_node_configuration(
  125. storage_type="json",
  126. default_path="default.json",
  127. encoder=MyCustomEncoder,
  128. scope=Scope.GLOBAL,
  129. validity_period=timedelta(2),
  130. )
  131. # Config with generic config_data_node without storage_type
  132. # should return the default DataNode
  133. dn1 = Config.configure_data_node(id="dn1")
  134. assert dn1.storage_type == "json"
  135. assert dn1.default_path == "default.json"
  136. assert dn1.encoder == MyCustomEncoder
  137. assert dn1.decoder is None
  138. assert dn1.scope == Scope.GLOBAL
  139. assert dn1.validity_period == timedelta(2)
  140. # Config with generic config_data_node without storage_type
  141. # with custom properties
  142. dn2 = Config.configure_data_node(id="dn2", default_path="dn2.json")
  143. assert dn2.storage_type == "json"
  144. assert dn2.default_path == "dn2.json"
  145. assert dn2.encoder == MyCustomEncoder
  146. assert dn2.decoder is None
  147. assert dn2.scope == Scope.GLOBAL
  148. assert dn2.validity_period == timedelta(2)
  149. # Config a datanode with specific "storage_type" = "json"
  150. # should use properties from the default datanode
  151. dn3 = Config.configure_data_node(
  152. id="dn3",
  153. storage_type="json",
  154. default_path="dn3.json",
  155. decoder=MyCustomDecoder,
  156. validity_period=timedelta(1),
  157. )
  158. assert dn3.storage_type == "json"
  159. assert dn3.default_path == "dn3.json"
  160. assert dn3.encoder == MyCustomEncoder
  161. assert dn3.decoder == MyCustomDecoder
  162. assert dn3.scope == Scope.GLOBAL
  163. assert dn3.validity_period == timedelta(1)
  164. def test_set_default_parquet_data_node_configuration():
  165. Config.set_default_data_node_configuration(
  166. storage_type="parquet",
  167. default_path="default.parquet",
  168. compression="gzip",
  169. exposed_type="numpy",
  170. scope=Scope.GLOBAL,
  171. validity_period=timedelta(2),
  172. )
  173. # Config with generic config_data_node without storage_type
  174. # should return the default DataNode
  175. dn1 = Config.configure_data_node(id="dn1")
  176. assert dn1.storage_type == "parquet"
  177. assert dn1.default_path == "default.parquet"
  178. assert dn1.engine == "pyarrow"
  179. assert dn1.compression == "gzip"
  180. assert dn1.read_kwargs is None
  181. assert dn1.write_kwargs is None
  182. assert dn1.exposed_type == "numpy"
  183. assert dn1.scope == Scope.GLOBAL
  184. assert dn1.validity_period == timedelta(2)
  185. # Config with generic config_data_node without storage_type
  186. # with custom properties
  187. dn2 = Config.configure_data_node(
  188. id="dn2",
  189. default_path="dn2.parquet",
  190. engine="fastparquet",
  191. )
  192. assert dn2.storage_type == "parquet"
  193. assert dn2.default_path == "dn2.parquet"
  194. assert dn2.engine == "fastparquet"
  195. assert dn2.compression == "gzip"
  196. assert dn2.read_kwargs is None
  197. assert dn2.write_kwargs is None
  198. assert dn2.exposed_type == "numpy"
  199. assert dn2.scope == Scope.GLOBAL
  200. assert dn2.validity_period == timedelta(2)
  201. # Config a datanode with specific "storage_type" = "parquet"
  202. # should use properties from the default datanode
  203. dn3 = Config.configure_data_node(
  204. id="dn3",
  205. storage_type="parquet",
  206. default_path="dn3.parquet",
  207. read_kwargs={"filter": "foo"},
  208. scope=Scope.SCENARIO,
  209. validity_period=timedelta(1),
  210. )
  211. assert dn3.storage_type == "parquet"
  212. assert dn3.default_path == "dn3.parquet"
  213. assert dn3.engine == "pyarrow"
  214. assert dn3.compression == "gzip"
  215. assert dn3.read_kwargs == {"filter": "foo"}
  216. assert dn3.write_kwargs is None
  217. assert dn3.exposed_type == "numpy"
  218. assert dn3.scope == Scope.SCENARIO
  219. assert dn3.validity_period == timedelta(1)
  220. def test_set_default_excel_data_node_configuration():
  221. Config.set_default_data_node_configuration(
  222. storage_type="excel",
  223. default_path="default.xlsx",
  224. has_header=False,
  225. exposed_type="numpy",
  226. scope=Scope.GLOBAL,
  227. validity_period=timedelta(2),
  228. )
  229. # Config with generic config_data_node without storage_type
  230. # should return the default DataNode
  231. dn1 = Config.configure_data_node(id="dn1")
  232. assert dn1.storage_type == "excel"
  233. assert dn1.scope == Scope.GLOBAL
  234. assert dn1.default_path == "default.xlsx"
  235. assert dn1.has_header is False
  236. assert dn1.sheet_name is None
  237. assert dn1.exposed_type == "numpy"
  238. assert dn1.validity_period == timedelta(2)
  239. # Config with generic config_data_node without storage_type
  240. # with custom properties
  241. dn2 = Config.configure_data_node(id="dn2", default_path="dn2.xlsx", sheet_name="sheet_1")
  242. assert dn2.storage_type == "excel"
  243. assert dn2.default_path == "dn2.xlsx"
  244. assert dn2.has_header is False
  245. assert dn2.sheet_name == "sheet_1"
  246. assert dn2.exposed_type == "numpy"
  247. assert dn2.scope == Scope.GLOBAL
  248. assert dn2.validity_period == timedelta(2)
  249. # Config a datanode with specific "storage_type" = "excel"
  250. # should use properties from the default datanode
  251. dn3 = Config.configure_data_node(
  252. id="dn3",
  253. storage_type="excel",
  254. default_path="dn3.xlsx",
  255. scope=Scope.SCENARIO,
  256. validity_period=timedelta(1),
  257. )
  258. assert dn3.storage_type == "excel"
  259. assert dn3.default_path == "dn3.xlsx"
  260. assert dn3.has_header is False
  261. assert dn3.sheet_name is None
  262. assert dn3.exposed_type == "numpy"
  263. assert dn3.scope == Scope.SCENARIO
  264. assert dn3.validity_period == timedelta(1)
  265. def test_set_default_pickle_data_node_configuration():
  266. Config.set_default_data_node_configuration(
  267. storage_type="pickle",
  268. default_data=1,
  269. exposed_type="numpy",
  270. scope=Scope.GLOBAL,
  271. validity_period=timedelta(2),
  272. )
  273. # Config with generic config_data_node without storage_type
  274. # should return the default DataNode
  275. dn1 = Config.configure_data_node(id="dn1")
  276. assert dn1.storage_type == "pickle"
  277. assert dn1.scope == Scope.GLOBAL
  278. assert dn1.default_path is None
  279. assert dn1.default_data == 1
  280. assert dn1.exposed_type == "numpy"
  281. assert dn1.validity_period == timedelta(2)
  282. # Config with generic config_data_node without storage_type
  283. # with custom properties
  284. dn2 = Config.configure_data_node(id="dn2", default_path="dn2.pkl", default_data=2)
  285. assert dn2.storage_type == "pickle"
  286. assert dn2.default_path == "dn2.pkl"
  287. assert dn2.default_data == 2
  288. assert dn2.exposed_type == "numpy"
  289. assert dn2.scope == Scope.GLOBAL
  290. assert dn2.validity_period == timedelta(2)
  291. # Config a datanode with specific "storage_type" = "pickle"
  292. # should use properties from the default datanode
  293. dn3 = Config.configure_data_node(
  294. id="dn3",
  295. storage_type="pickle",
  296. default_path="dn3.pkl",
  297. scope=Scope.SCENARIO,
  298. validity_period=timedelta(1),
  299. )
  300. assert dn3.storage_type == "pickle"
  301. assert dn3.default_path == "dn3.pkl"
  302. assert dn3.default_data == 1
  303. assert dn3.exposed_type == "numpy"
  304. assert dn3.scope == Scope.SCENARIO
  305. assert dn3.validity_period == timedelta(1)
  306. def test_set_default_sql_table_data_node_configuration():
  307. Config.set_default_data_node_configuration(
  308. storage_type="sql_table",
  309. db_username="default_user",
  310. db_password="default_pwd",
  311. db_name="default_db_name",
  312. db_engine="mssql",
  313. table_name="default_table",
  314. db_port=1010,
  315. db_host="default_host",
  316. db_driver="default server",
  317. db_extra_args={"default": "default"},
  318. scope=Scope.GLOBAL,
  319. validity_period=timedelta(2),
  320. )
  321. # Config with generic config_data_node without storage_type
  322. # should return the default DataNode
  323. dn1 = Config.configure_data_node(id="dn1")
  324. assert dn1.storage_type == "sql_table"
  325. assert dn1.db_username == "default_user"
  326. assert dn1.db_password == "default_pwd"
  327. assert dn1.db_name == "default_db_name"
  328. assert dn1.db_engine == "mssql"
  329. assert dn1.table_name == "default_table"
  330. assert dn1.db_port == 1010
  331. assert dn1.db_host == "default_host"
  332. assert dn1.db_driver == "default server"
  333. assert dn1.db_extra_args == {"default": "default"}
  334. assert dn1.scope == Scope.GLOBAL
  335. assert dn1.validity_period == timedelta(2)
  336. # Config with generic config_data_node without storage_type
  337. # with custom properties
  338. dn2 = Config.configure_data_node(
  339. id="dn2",
  340. table_name="table_2",
  341. db_port=2020,
  342. db_host="host_2",
  343. )
  344. assert dn2.storage_type == "sql_table"
  345. assert dn2.db_username == "default_user"
  346. assert dn2.db_password == "default_pwd"
  347. assert dn2.db_name == "default_db_name"
  348. assert dn2.db_engine == "mssql"
  349. assert dn2.table_name == "table_2"
  350. assert dn2.db_port == 2020
  351. assert dn2.db_host == "host_2"
  352. assert dn2.db_driver == "default server"
  353. assert dn2.db_extra_args == {"default": "default"}
  354. assert dn2.scope == Scope.GLOBAL
  355. assert dn2.validity_period == timedelta(2)
  356. # Config a datanode with specific "storage_type" = "sql_table"
  357. # should use properties from the default datanode
  358. dn3 = Config.configure_data_node(
  359. id="dn3",
  360. storage_type="sql_table",
  361. db_username="user_3",
  362. db_password="pwd_3",
  363. db_name="db_3",
  364. db_engine="postgresql",
  365. table_name="table_3",
  366. validity_period=timedelta(1),
  367. )
  368. assert dn3.storage_type == "sql_table"
  369. assert dn3.db_username == "user_3"
  370. assert dn3.db_password == "pwd_3"
  371. assert dn3.db_name == "db_3"
  372. assert dn3.db_engine == "postgresql"
  373. assert dn3.table_name == "table_3"
  374. assert dn3.db_port == 1010
  375. assert dn3.db_host == "default_host"
  376. assert dn3.db_driver == "default server"
  377. assert dn3.db_extra_args == {"default": "default"}
  378. assert dn3.scope == Scope.GLOBAL
  379. assert dn3.validity_period == timedelta(1)
  380. def test_set_default_sql_data_node_configuration():
  381. def query_builder():
  382. ...
  383. Config.set_default_data_node_configuration(
  384. storage_type="sql",
  385. db_username="default_user",
  386. db_password="default_pwd",
  387. db_name="default_db_name",
  388. db_engine="mssql",
  389. read_query="SELECT * FROM default_table",
  390. write_query_builder=query_builder,
  391. append_query_builder=query_builder,
  392. db_port=1010,
  393. db_host="default_host",
  394. db_driver="default server",
  395. db_extra_args={"default": "default"},
  396. scope=Scope.GLOBAL,
  397. validity_period=timedelta(2),
  398. )
  399. # Config with generic config_data_node without storage_type
  400. # should return the default DataNode
  401. dn1 = Config.configure_data_node(id="dn1")
  402. assert dn1.storage_type == "sql"
  403. assert dn1.db_username == "default_user"
  404. assert dn1.db_password == "default_pwd"
  405. assert dn1.db_name == "default_db_name"
  406. assert dn1.db_engine == "mssql"
  407. assert dn1.read_query == "SELECT * FROM default_table"
  408. assert dn1.write_query_builder == query_builder
  409. assert dn1.append_query_builder == query_builder
  410. assert dn1.db_port == 1010
  411. assert dn1.db_host == "default_host"
  412. assert dn1.db_driver == "default server"
  413. assert dn1.db_extra_args == {"default": "default"}
  414. assert dn1.scope == Scope.GLOBAL
  415. assert dn1.validity_period == timedelta(2)
  416. # Config with generic config_data_node without storage_type
  417. # with custom properties
  418. dn2 = Config.configure_data_node(
  419. id="dn2", table_name="table_2", db_port=2020, db_host="host_2", read_query="SELECT * FROM table_2"
  420. )
  421. assert dn2.storage_type == "sql"
  422. assert dn2.db_username == "default_user"
  423. assert dn2.db_password == "default_pwd"
  424. assert dn2.db_name == "default_db_name"
  425. assert dn2.db_engine == "mssql"
  426. assert dn2.read_query == "SELECT * FROM table_2"
  427. assert dn2.write_query_builder == query_builder
  428. assert dn2.append_query_builder == query_builder
  429. assert dn2.db_port == 2020
  430. assert dn2.db_host == "host_2"
  431. assert dn2.db_driver == "default server"
  432. assert dn2.db_extra_args == {"default": "default"}
  433. assert dn2.scope == Scope.GLOBAL
  434. assert dn2.validity_period == timedelta(2)
  435. # Config a datanode with specific "storage_type" = "sql"
  436. # should use properties from the default datanode
  437. dn3 = Config.configure_data_node(
  438. id="dn3",
  439. storage_type="sql",
  440. db_username="user_3",
  441. db_password="pwd_3",
  442. db_name="db_3",
  443. db_engine="postgresql",
  444. read_query="SELECT * FROM table_3",
  445. write_query_builder=query_builder,
  446. validity_period=timedelta(1),
  447. )
  448. assert dn3.storage_type == "sql"
  449. assert dn3.db_username == "user_3"
  450. assert dn3.db_password == "pwd_3"
  451. assert dn3.db_name == "db_3"
  452. assert dn3.db_engine == "postgresql"
  453. assert dn3.read_query == "SELECT * FROM table_3"
  454. assert dn3.write_query_builder == query_builder
  455. assert dn3.append_query_builder == query_builder
  456. assert dn3.db_port == 1010
  457. assert dn3.db_host == "default_host"
  458. assert dn3.db_driver == "default server"
  459. assert dn3.db_extra_args == {"default": "default"}
  460. assert dn3.scope == Scope.GLOBAL
  461. assert dn3.validity_period == timedelta(1)
  462. def test_set_default_mongo_collection_data_node_configuration():
  463. Config.set_default_data_node_configuration(
  464. storage_type="mongo_collection",
  465. db_name="default_db_name",
  466. collection_name="default_collection",
  467. db_port=1010,
  468. db_host="default_host",
  469. db_driver="default server",
  470. db_extra_args={"default": "default"},
  471. scope=Scope.GLOBAL,
  472. validity_period=timedelta(2),
  473. )
  474. # Config with generic config_data_node without storage_type
  475. # should return the default DataNode
  476. dn1 = Config.configure_data_node(id="dn1")
  477. assert dn1.storage_type == "mongo_collection"
  478. assert dn1.db_username == ""
  479. assert dn1.db_password == ""
  480. assert dn1.db_name == "default_db_name"
  481. assert dn1.collection_name == "default_collection"
  482. assert dn1.custom_document == MongoDefaultDocument
  483. assert dn1.db_host == "default_host"
  484. assert dn1.db_port == 1010
  485. assert dn1.db_driver == "default server"
  486. assert dn1.db_extra_args == {"default": "default"}
  487. assert dn1.scope == Scope.GLOBAL
  488. assert dn1.validity_period == timedelta(2)
  489. # Config with generic config_data_node without storage_type
  490. # with custom properties
  491. dn2 = Config.configure_data_node(
  492. id="dn2",
  493. collection_name="collection_2",
  494. db_port=2020,
  495. db_host="host_2",
  496. )
  497. assert dn2.storage_type == "mongo_collection"
  498. assert dn2.db_username == ""
  499. assert dn2.db_password == ""
  500. assert dn2.db_name == "default_db_name"
  501. assert dn2.collection_name == "collection_2"
  502. assert dn2.custom_document == MongoDefaultDocument
  503. assert dn2.db_host == "host_2"
  504. assert dn2.db_port == 2020
  505. assert dn2.db_driver == "default server"
  506. assert dn2.db_extra_args == {"default": "default"}
  507. assert dn2.scope == Scope.GLOBAL
  508. assert dn2.validity_period == timedelta(2)
  509. # Config a datanode with specific "storage_type" = "mongo_collection"
  510. # should use properties from the default datanode
  511. dn3 = Config.configure_data_node(
  512. id="dn3",
  513. storage_type="mongo_collection",
  514. db_name="db_3",
  515. collection_name="collection_3",
  516. db_username="user_3",
  517. db_password="pwd_3",
  518. validity_period=timedelta(1),
  519. )
  520. assert dn3.storage_type == "mongo_collection"
  521. assert dn3.db_username == "user_3"
  522. assert dn3.db_password == "pwd_3"
  523. assert dn3.db_name == "db_3"
  524. assert dn3.collection_name == "collection_3"
  525. assert dn3.custom_document == MongoDefaultDocument
  526. assert dn3.db_port == 1010
  527. assert dn3.db_host == "default_host"
  528. assert dn3.db_driver == "default server"
  529. assert dn3.db_extra_args == {"default": "default"}
  530. assert dn3.scope == Scope.GLOBAL
  531. assert dn3.validity_period == timedelta(1)
  532. def test_set_default_s3_object_data_node_configuration():
  533. Config.set_default_data_node_configuration(
  534. storage_type="s3_object",
  535. aws_access_key="default_access_key",
  536. aws_secret_access_key="default_secret_acces_key",
  537. aws_s3_bucket_name="default_bucket_name",
  538. aws_s3_object_key="default_object_key",
  539. aws_region="",
  540. aws_s3_object_parameters={"default": "default"},
  541. scope=Scope.GLOBAL,
  542. validity_period=timedelta(2),
  543. )
  544. # Config with generic config_data_node without storage_type
  545. # should return the default DataNode
  546. dn1 = Config.configure_data_node(id="dn1")
  547. assert dn1.storage_type == "s3_object"
  548. assert dn1.aws_access_key == "default_access_key"
  549. assert dn1.aws_secret_access_key == "default_secret_acces_key"
  550. assert dn1.aws_s3_bucket_name == "default_bucket_name"
  551. assert dn1.aws_s3_object_key == "default_object_key"
  552. assert dn1.aws_region == ""
  553. assert dn1.aws_s3_object_parameters == {"default": "default"}
  554. assert dn1.scope == Scope.GLOBAL
  555. assert dn1.validity_period == timedelta(2)
  556. # Config with generic config_data_node without storage_type
  557. # with custom properties
  558. dn2 = Config.configure_data_node(
  559. id="dn2",
  560. aws_access_key="custom_access_key_2",
  561. aws_secret_access_key="custom_secret_acces_key_2",
  562. aws_s3_bucket_name="custom_bucket_name_2",
  563. aws_s3_object_key="custom_object_key_2",
  564. )
  565. assert dn2.storage_type == "s3_object"
  566. assert dn2.aws_access_key == "custom_access_key_2"
  567. assert dn2.aws_secret_access_key == "custom_secret_acces_key_2"
  568. assert dn2.aws_s3_bucket_name == "custom_bucket_name_2"
  569. assert dn2.aws_s3_object_key == "custom_object_key_2"
  570. assert dn2.aws_region == ""
  571. assert dn2.aws_s3_object_parameters == {"default": "default"}
  572. assert dn2.scope == Scope.GLOBAL
  573. assert dn2.validity_period == timedelta(2)
  574. # Config a datanode with specific "storage_type" = "s3_object"
  575. # should use properties from the default datanode
  576. dn3 = Config.configure_data_node(
  577. id="dn3",
  578. storage_type="s3_object",
  579. aws_access_key="custom_access_key_3",
  580. aws_secret_access_key="custom_secret_acces_key_3",
  581. aws_s3_bucket_name="custom_bucket_name_3",
  582. aws_s3_object_key="custom_object_key_3",
  583. aws_region="",
  584. aws_s3_object_parameters={"default": "default"},
  585. scope=Scope.GLOBAL,
  586. validity_period=timedelta(1),
  587. )
  588. assert dn3.storage_type == "s3_object"
  589. assert dn3.aws_access_key == "custom_access_key_3"
  590. assert dn3.aws_secret_access_key == "custom_secret_acces_key_3"
  591. assert dn3.aws_s3_bucket_name == "custom_bucket_name_3"
  592. assert dn3.aws_s3_object_key == "custom_object_key_3"
  593. assert dn3.aws_region == ""
  594. assert dn3.aws_s3_object_parameters == {"default": "default"}
  595. assert dn3.scope == Scope.GLOBAL
  596. assert dn3.validity_period == timedelta(1)