
# Copyright 2021-2024 Avaiga Private Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import os
from unittest.mock import patch

import pytest

from taipy.config.config import Config
from taipy.core import Core
from taipy.core.data._data_manager import _DataManager
from taipy.core.data.csv import CSVDataNode
from taipy.core.data.excel import ExcelDataNode
from taipy.core.data.json import JSONDataNode
from taipy.core.data.parquet import ParquetDataNode
from taipy.core.data.pickle import PickleDataNode
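

# Helper: return the raw lines of the backup file, one backed-up file path
# (newline-terminated) per line.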
def read_backup_file(path):
    with open(path, "r") as f:
        lines = f.readlines()
    return lines
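

# Autouse fixture: every test runs with TAIPY_BACKUP_FILE_PATH pointing at
# ".taipy_backups", so Taipy records the paths of file-based data nodes there.
# Any stale file is removed up front, and both the file and the environment
# variable are cleaned up after each test.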
@pytest.fixture(scope="function", autouse=True)
def init_backup_file():
    os.environ["TAIPY_BACKUP_FILE_PATH"] = ".taipy_backups"
    if os.path.exists(os.environ["TAIPY_BACKUP_FILE_PATH"]):
        os.remove(os.environ["TAIPY_BACKUP_FILE_PATH"])

    yield

    if os.path.exists(".taipy_backups"):
        os.remove(".taipy_backups")
    del os.environ["TAIPY_BACKUP_FILE_PATH"]
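

# Must match the path set by the init_backup_file fixture above.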
backup_file_path = ".taipy_backups"
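

# Running Core should record the storage folder itself as a single backup
# entry. sys.argv is patched so Core's CLI parsing does not see pytest's
# own arguments.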
def test_backup_storage_folder_when_core_run():
    with patch("sys.argv", ["prog"]):
        core = Core()
        core.run()
        backup_files = read_backup_file(backup_file_path)
        assert backup_files == [f"{Config.core.storage_folder}\n"]
        core.stop()
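

# A data node whose file lives inside the storage folder gets no entry of its
# own; only files stored outside the storage folder are backed up.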
def test_no_new_entry_when_file_is_in_storage_folder():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", path="dn_1.pickle")
    dn_cfg_2 = Config.configure_data_node("dn_cfg_2")  # stored in .data folder

    dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
    dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)

    dn_1.write("DN1_CONTENT")
    dn_2.write("DN2_CONTENT")

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{dn_1.path}\n"]

    os.remove(dn_1.path)
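

# The backup file tracks CSV data nodes through their life cycle: creating a
# node appends its path, reassigning .path drops the old entry and appends the
# new one, and deleting a node removes its entry.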
def test_backup_csv_files():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "csv", path="example_1.csv")
    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "csv", path="example_2.csv")

    csv_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(csv_dn_1, CSVDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_1.path}\n"]

    csv_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
    assert isinstance(csv_dn_2, CSVDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_1.path}\n", f"{csv_dn_2.path}\n"]

    csv_dn_1.path = "example_3.csv"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_2.path}\n", f"{csv_dn_1.path}\n"]

    csv_dn_2.path = "example_4.csv"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_1.path}\n", f"{csv_dn_2.path}\n"]

    _DataManager._delete(csv_dn_1.id)
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_2.path}\n"]

    csv_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
    csv_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(csv_dn_3, CSVDataNode)
    assert isinstance(csv_dn_4, CSVDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_2.path}\n", f"{csv_dn_3.path}\n", f"{csv_dn_4.path}\n"]

    csv_dn_4.path = "example_5.csv"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{csv_dn_2.path}\n", f"{csv_dn_3.path}\n", f"{csv_dn_4.path}\n"]

    _DataManager._delete_all()
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == []
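

# Same life cycle checks as test_backup_csv_files, for Excel data nodes.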
def test_backup_excel_files():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "excel", path="example_1.xlsx")
    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "excel", path="example_2.xlsx")

    excel_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(excel_dn_1, ExcelDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_1.path}\n"]

    excel_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
    assert isinstance(excel_dn_2, ExcelDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_1.path}\n", f"{excel_dn_2.path}\n"]

    excel_dn_1.path = "example_3.excel"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_2.path}\n", f"{excel_dn_1.path}\n"]

    excel_dn_2.path = "example_4.excel"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_1.path}\n", f"{excel_dn_2.path}\n"]

    _DataManager._delete(excel_dn_1.id)
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_2.path}\n"]

    excel_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
    excel_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(excel_dn_3, ExcelDataNode)
    assert isinstance(excel_dn_4, ExcelDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_2.path}\n", f"{excel_dn_3.path}\n", f"{excel_dn_4.path}\n"]

    excel_dn_4.path = "example_5.excel"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{excel_dn_2.path}\n", f"{excel_dn_3.path}\n", f"{excel_dn_4.path}\n"]

    _DataManager._delete_all()
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == []
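

# Same life cycle checks, for pickle data nodes.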
def test_backup_pickle_files():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "pickle", path="example_1.p")
    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "pickle", path="example_2.p")

    pickle_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(pickle_dn_1, PickleDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_1.path}\n"]

    pickle_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
    assert isinstance(pickle_dn_2, PickleDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_1.path}\n", f"{pickle_dn_2.path}\n"]

    pickle_dn_1.path = "example_3.pickle"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_2.path}\n", f"{pickle_dn_1.path}\n"]

    pickle_dn_2.path = "example_4.pickle"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_1.path}\n", f"{pickle_dn_2.path}\n"]

    _DataManager._delete(pickle_dn_1.id)
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_2.path}\n"]

    pickle_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
    pickle_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(pickle_dn_3, PickleDataNode)
    assert isinstance(pickle_dn_4, PickleDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_2.path}\n", f"{pickle_dn_3.path}\n", f"{pickle_dn_4.path}\n"]

    pickle_dn_4.path = "example_5.pickle"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{pickle_dn_2.path}\n", f"{pickle_dn_3.path}\n", f"{pickle_dn_4.path}\n"]

    _DataManager._delete_all()
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == []
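

# Same life cycle checks, for JSON data nodes.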
def test_backup_json_files():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "json", path="example_1.json")
    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "json", path="example_2.json")

    json_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(json_dn_1, JSONDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_1.path}\n"]

    json_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
    assert isinstance(json_dn_2, JSONDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_1.path}\n", f"{json_dn_2.path}\n"]

    json_dn_1.path = "example_3.json"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_2.path}\n", f"{json_dn_1.path}\n"]

    json_dn_2.path = "example_4.json"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_1.path}\n", f"{json_dn_2.path}\n"]

    _DataManager._delete(json_dn_1.id)
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_2.path}\n"]

    json_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
    json_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(json_dn_3, JSONDataNode)
    assert isinstance(json_dn_4, JSONDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_2.path}\n", f"{json_dn_3.path}\n", f"{json_dn_4.path}\n"]

    json_dn_4.path = "example_5.json"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{json_dn_2.path}\n", f"{json_dn_3.path}\n", f"{json_dn_4.path}\n"]

    _DataManager._delete_all()
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == []
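

# Same life cycle checks, for Parquet data nodes.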
def test_backup_parquet_files():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "parquet", path="example_1.parquet")
    dn_cfg_2 = Config.configure_data_node("dn_cfg_2", "parquet", path="example_2.parquet")

    parquet_dn_1 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(parquet_dn_1, ParquetDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_1.path}\n"]

    parquet_dn_2 = _DataManager._create_and_set(dn_cfg_2, None, None)
    assert isinstance(parquet_dn_2, ParquetDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_1.path}\n", f"{parquet_dn_2.path}\n"]

    parquet_dn_1.path = "example_3.parquet"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_2.path}\n", f"{parquet_dn_1.path}\n"]

    parquet_dn_2.path = "example_4.parquet"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_1.path}\n", f"{parquet_dn_2.path}\n"]

    _DataManager._delete(parquet_dn_1.id)
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_2.path}\n"]

    parquet_dn_3 = _DataManager._create_and_set(dn_cfg_1, None, None)
    parquet_dn_4 = _DataManager._create_and_set(dn_cfg_1, None, None)
    assert isinstance(parquet_dn_3, ParquetDataNode)
    assert isinstance(parquet_dn_4, ParquetDataNode)

    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_2.path}\n", f"{parquet_dn_3.path}\n", f"{parquet_dn_4.path}\n"]

    parquet_dn_4.path = "example_5.parquet"
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == [f"{parquet_dn_2.path}\n", f"{parquet_dn_3.path}\n", f"{parquet_dn_4.path}\n"]

    _DataManager._delete_all()
    backup_files = read_backup_file(backup_file_path)
    assert backup_files == []
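

# When TAIPY_BACKUP_FILE_PATH is not set, creating a data node should not
# record any backup entry.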
def test_no_backup_if_no_env_var():
    dn_cfg_1 = Config.configure_data_node("dn_cfg_1", "csv", path="example_1.csv")
    _DataManager._create_and_set(dn_cfg_1, None, None)