# Copyright 2021-2024 Avaiga Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import os

import pytest

from taipy.config.config import Config
from taipy.core import Core
from taipy.core.data._data_manager import _DataManager
from taipy.core.data.csv import CSVDataNode
from taipy.core.data.excel import ExcelDataNode
from taipy.core.data.json import JSONDataNode
from taipy.core.data.parquet import ParquetDataNode
from taipy.core.data.pickle import PickleDataNode
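

# Return the lines of the backup tracking file: one backed-up file path per line.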
def read_backup_file(path):
    with open(path, "r") as f:
        lines = f.readlines()
    return lines
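

# Point TAIPY_BACKUP_FILE_PATH at a throwaway tracking file before each test,
# then remove both the file and the environment variable afterwards.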
@pytest.fixture(scope="function", autouse=True)
def init_backup_file():
    os.environ["TAIPY_BACKUP_FILE_PATH"] = ".taipy_backups"
    if os.path.exists(os.environ["TAIPY_BACKUP_FILE_PATH"]):
        os.remove(os.environ["TAIPY_BACKUP_FILE_PATH"])

    yield

    if os.path.exists(".taipy_backups"):
        os.remove(".taipy_backups")
    del os.environ["TAIPY_BACKUP_FILE_PATH"]


backup_file_path = ".taipy_backups"
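

# Running the Core registers the whole storage folder for backup.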
def test_backup_storage_folder_when_core_run():
    core = Core()
    core.run()

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{Config.core.storage_folder}\n"]

    core.stop()
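

# A data node whose file lives inside the storage folder needs no entry of its own;
# only the node with an explicit external path is tracked.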
def test_no_new_entry_when_file_is_in_storage_folder():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", path="dn_1.pickle")
    dn_cfg_2 = Config.configure_data_node("dn_cfg_2")  # stored in the .data folder

    dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
    dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)

    dn_1.write("DN1_CONTENT")
    dn_2.write("DN2_CONTENT")

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{dn_1.path}\n"]

    os.remove(dn_1.path)
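

# Creating, re-pathing, and deleting CSV data nodes keeps the tracking file in sync.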
def test_backup_csv_files():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "csv", path="example_1.csv")
    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "csv", path="example_2.csv")

    csv_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(csv_dn_1, CSVDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_1.path}\n"]

    csv_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
    assert isinstance(csv_dn_2, CSVDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_1.path}\n", f"{csv_dn_2.path}\n"]

    csv_dn_1.path = "example_3.csv"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_2.path}\n", f"{csv_dn_1.path}\n"]

    csv_dn_2.path = "example_4.csv"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_1.path}\n", f"{csv_dn_2.path}\n"]

    _DataManager._delete(csv_dn_1.id)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_2.path}\n"]

    csv_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
    csv_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(csv_dn_3, CSVDataNode)
    assert isinstance(csv_dn_4, CSVDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_2.path}\n", f"{csv_dn_3.path}\n", f"{csv_dn_4.path}\n"]

    csv_dn_4.path = "example_5.csv"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_2.path}\n", f"{csv_dn_3.path}\n", f"{csv_dn_4.path}\n"]

    _DataManager._delete_all()

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == []
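

# Same life cycle checks for Excel data nodes.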
def test_backup_excel_files():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "excel", path="example_1.xlsx")
    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "excel", path="example_2.xlsx")

    excel_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(excel_dn_1, ExcelDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_1.path}\n"]

    excel_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
    assert isinstance(excel_dn_2, ExcelDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_1.path}\n", f"{excel_dn_2.path}\n"]

    excel_dn_1.path = "example_3.excel"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_2.path}\n", f"{excel_dn_1.path}\n"]

    excel_dn_2.path = "example_4.excel"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_1.path}\n", f"{excel_dn_2.path}\n"]

    _DataManager._delete(excel_dn_1.id)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_2.path}\n"]

    excel_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
    excel_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(excel_dn_3, ExcelDataNode)
    assert isinstance(excel_dn_4, ExcelDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_2.path}\n", f"{excel_dn_3.path}\n", f"{excel_dn_4.path}\n"]

    excel_dn_4.path = "example_5.excel"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_2.path}\n", f"{excel_dn_3.path}\n", f"{excel_dn_4.path}\n"]

    _DataManager._delete_all()

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == []
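

# Same life cycle checks for pickle data nodes.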
def test_backup_pickle_files():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "pickle", path="example_1.p")
    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "pickle", path="example_2.p")

    pickle_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(pickle_dn_1, PickleDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_1.path}\n"]

    pickle_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
    assert isinstance(pickle_dn_2, PickleDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_1.path}\n", f"{pickle_dn_2.path}\n"]

    pickle_dn_1.path = "example_3.pickle"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_2.path}\n", f"{pickle_dn_1.path}\n"]

    pickle_dn_2.path = "example_4.pickle"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_1.path}\n", f"{pickle_dn_2.path}\n"]

    _DataManager._delete(pickle_dn_1.id)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_2.path}\n"]

    pickle_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
    pickle_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(pickle_dn_3, PickleDataNode)
    assert isinstance(pickle_dn_4, PickleDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_2.path}\n", f"{pickle_dn_3.path}\n", f"{pickle_dn_4.path}\n"]

    pickle_dn_4.path = "example_5.pickle"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_2.path}\n", f"{pickle_dn_3.path}\n", f"{pickle_dn_4.path}\n"]

    _DataManager._delete_all()

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == []
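

# Same life cycle checks for JSON data nodes.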
def test_backup_json_files():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "json", path="example_1.json")
    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "json", path="example_2.json")

    json_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(json_dn_1, JSONDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_1.path}\n"]

    json_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
    assert isinstance(json_dn_2, JSONDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_1.path}\n", f"{json_dn_2.path}\n"]

    json_dn_1.path = "example_3.json"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_2.path}\n", f"{json_dn_1.path}\n"]

    json_dn_2.path = "example_4.json"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_1.path}\n", f"{json_dn_2.path}\n"]

    _DataManager._delete(json_dn_1.id)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_2.path}\n"]

    json_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
    json_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(json_dn_3, JSONDataNode)
    assert isinstance(json_dn_4, JSONDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_2.path}\n", f"{json_dn_3.path}\n", f"{json_dn_4.path}\n"]

    json_dn_4.path = "example_5.json"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_2.path}\n", f"{json_dn_3.path}\n", f"{json_dn_4.path}\n"]

    _DataManager._delete_all()

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == []
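

# Same life cycle checks for Parquet data nodes.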
def test_backup_parquet_files():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "parquet", path="example_1.parquet")
    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "parquet", path="example_2.parquet")

    parquet_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(parquet_dn_1, ParquetDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_1.path}\n"]

    parquet_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
    assert isinstance(parquet_dn_2, ParquetDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_1.path}\n", f"{parquet_dn_2.path}\n"]

    parquet_dn_1.path = "example_3.parquet"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_2.path}\n", f"{parquet_dn_1.path}\n"]

    parquet_dn_2.path = "example_4.parquet"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_1.path}\n", f"{parquet_dn_2.path}\n"]

    _DataManager._delete(parquet_dn_1.id)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_2.path}\n"]

    parquet_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
    parquet_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(parquet_dn_3, ParquetDataNode)
    assert isinstance(parquet_dn_4, ParquetDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_2.path}\n", f"{parquet_dn_3.path}\n", f"{parquet_dn_4.path}\n"]

    parquet_dn_4.path = "example_5.parquet"

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_2.path}\n", f"{parquet_dn_3.path}\n", f"{parquet_dn_4.path}\n"]

    _DataManager._delete_all()

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == []
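

# With no TAIPY_BACKUP_FILE_PATH in the environment, creating a data node should
# leave no backup entry behind.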
def test_no_backup_if_no_env_var():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "csv", path="example_1.csv")
    _DataManager._create_and_set(dn_cfg_1, None, None)