"""Dump and restore jobs for MySQL and PostgreSQL databases (BackupManager)."""
import json
import os
import subprocess
import tarfile
import tempfile
import time
from datetime import datetime, timezone
  7. def run_db_dump(job, instance, backup_dir):
  8. """Point d'entrée commun mysql et postgresql."""
  9. if job.type == "mysql":
  10. return _run_mysql(job, instance, backup_dir)
  11. elif job.type == "postgresql":
  12. return _run_postgresql(job, instance, backup_dir)
  13. raise ValueError(f"Type inconnu pour db_dump : {job.type}")
  14. # ---------------------------------------------------------------------------
  15. # MySQL
  16. # ---------------------------------------------------------------------------
  17. def _run_mysql(job, instance, backup_dir):
  18. from flask import current_app
  19. cfg = json.loads(job.config_json or "{}")
  20. dbname = cfg.get("database", "")
  21. if not dbname:
  22. raise ValueError("Nom de base de données manquant dans la configuration du job.")
  23. archive_name = _archive_name(instance, "mysql", dbname, backup_dir)
  24. with tempfile.TemporaryDirectory() as tmpdir:
  25. dump_path = os.path.join(tmpdir, f"{dbname}.sql")
  26. result = subprocess.run(
  27. [
  28. "sudo", "mysqldump",
  29. "--single-transaction",
  30. "--routines",
  31. "--triggers",
  32. "--result-file", dump_path,
  33. dbname,
  34. ],
  35. capture_output=True,
  36. text=True,
  37. timeout=7200,
  38. )
  39. log = (result.stdout + result.stderr).strip()
  40. if result.returncode != 0:
  41. raise RuntimeError(f"mysqldump a échoué (code {result.returncode}) :\n{log}")
  42. _write_tar(tmpdir, dump_path, dbname, archive_name, backup_dir, job,
  43. instance, current_app.config.get("INSTANCE_URL", ""))
  44. return archive_name, log or "mysqldump terminé sans sortie."
  45. # ---------------------------------------------------------------------------
  46. # PostgreSQL
  47. # ---------------------------------------------------------------------------
  48. def _run_postgresql(job, instance, backup_dir):
  49. from flask import current_app
  50. cfg = json.loads(job.config_json or "{}")
  51. dbname = cfg.get("database", "")
  52. if not dbname:
  53. raise ValueError("Nom de base de données manquant dans la configuration du job.")
  54. archive_name = _archive_name(instance, "postgresql", dbname, backup_dir)
  55. with tempfile.TemporaryDirectory() as tmpdir:
  56. dump_path = os.path.join(tmpdir, f"{dbname}.sql")
  57. # pg_dump doit tourner en tant qu'utilisateur postgres
  58. result = subprocess.run(
  59. ["sudo", "-u", "postgres", "pg_dump", "--format=plain", dbname],
  60. capture_output=True,
  61. timeout=7200,
  62. )
  63. if result.returncode != 0:
  64. log = result.stderr.decode("utf-8", errors="replace").strip()
  65. raise RuntimeError(f"pg_dump a échoué (code {result.returncode}) :\n{log}")
  66. with open(dump_path, "wb") as f:
  67. f.write(result.stdout)
  68. log = result.stderr.decode("utf-8", errors="replace").strip()
  69. _write_tar(tmpdir, dump_path, dbname, archive_name, backup_dir, job,
  70. instance, current_app.config.get("INSTANCE_URL", ""))
  71. return archive_name, log or "pg_dump terminé sans sortie."
  72. # ---------------------------------------------------------------------------
  73. # Helpers partagés
  74. # ---------------------------------------------------------------------------
  75. def _archive_name(instance, db_type, dbname, backup_dir):
  76. from jobs.utils import unique_archive_name
  77. date_str = datetime.utcnow().strftime("%Y%m%d")
  78. return unique_archive_name(f"{instance}_{db_type}_{dbname}_{date_str}", backup_dir)
  79. # ---------------------------------------------------------------------------
  80. # Restore
  81. # ---------------------------------------------------------------------------
  82. def restore_db_dump(archive_name, backup_dir):
  83. """Restauration d'une base MySQL ou PostgreSQL depuis une archive BackupManager."""
  84. archive_path = os.path.join(backup_dir, archive_name + ".tar")
  85. from jobs.utils import sudo_exists
  86. if not sudo_exists(archive_path):
  87. raise FileNotFoundError(f"Archive introuvable : {archive_path}")
  88. info = _read_backup_info(archive_path)
  89. db_type = info.get("type")
  90. dbname = info.get("database", "")
  91. if not dbname:
  92. raise ValueError("Nom de base de données introuvable dans backup_info.json.")
  93. with tempfile.TemporaryDirectory() as tmpdir:
  94. dump_path = os.path.join(tmpdir, f"{dbname}.sql")
  95. # Extraction du dump depuis l'archive via sudo (backup_dir est 750 root)
  96. result = subprocess.run(
  97. ["sudo", "tar", "-xOf", archive_path, f"db/{dbname}.sql"],
  98. capture_output=True,
  99. )
  100. if result.returncode != 0:
  101. err = result.stderr.decode("utf-8", errors="replace").strip()
  102. raise RuntimeError(f"Extraction du dump échouée : {err}")
  103. with open(dump_path, "wb") as dst:
  104. dst.write(result.stdout)
  105. if db_type == "mysql":
  106. return _restore_mysql(dbname, dump_path)
  107. elif db_type == "postgresql":
  108. return _restore_postgresql(dbname, dump_path)
  109. else:
  110. raise ValueError(f"Type de base inconnu dans l'archive : {db_type}")
  111. def _restore_mysql(dbname, dump_path):
  112. log_lines = []
  113. # Suppression + recréation propre de la base
  114. result = subprocess.run(
  115. ["sudo", "mysql", "-e",
  116. f"DROP DATABASE IF EXISTS `{dbname}`; CREATE DATABASE `{dbname}` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;"],
  117. capture_output=True, text=True, timeout=60,
  118. )
  119. if result.returncode != 0:
  120. raise RuntimeError(f"Impossible de recréer la base MySQL '{dbname}' :\n{result.stderr.strip()}")
  121. log_lines.append(f"Base MySQL '{dbname}' recréée.")
  122. # Restauration du dump
  123. with open(dump_path, "rb") as f:
  124. result = subprocess.run(
  125. ["sudo", "mysql", dbname],
  126. stdin=f,
  127. capture_output=True,
  128. timeout=7200,
  129. )
  130. log = result.stderr.decode("utf-8", errors="replace").strip()
  131. if result.returncode != 0:
  132. raise RuntimeError(f"mysql restore a échoué (code {result.returncode}) :\n{log}")
  133. log_lines.append(f"Dump restauré dans '{dbname}'.")
  134. if log:
  135. log_lines.append(log)
  136. return "\n".join(log_lines)
  137. def _restore_postgresql(dbname, dump_path):
  138. log_lines = []
  139. # Terminer les connexions actives puis drop + recreate
  140. subprocess.run(
  141. ["sudo", "-u", "postgres", "psql", "-c",
  142. f"SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = '{dbname}' AND pid <> pg_backend_pid();"],
  143. capture_output=True, timeout=30,
  144. )
  145. subprocess.run(
  146. ["sudo", "-u", "postgres", "dropdb", "--if-exists", dbname],
  147. capture_output=True, timeout=60,
  148. )
  149. result = subprocess.run(
  150. ["sudo", "-u", "postgres", "createdb", dbname],
  151. capture_output=True, text=True, timeout=60,
  152. )
  153. if result.returncode != 0:
  154. raise RuntimeError(f"Impossible de recréer la base PostgreSQL '{dbname}' :\n{result.stderr.strip()}")
  155. log_lines.append(f"Base PostgreSQL '{dbname}' recréée.")
  156. # Restauration du dump
  157. with open(dump_path, "rb") as f:
  158. result = subprocess.run(
  159. ["sudo", "-u", "postgres", "psql", "-d", dbname, "-v", "ON_ERROR_STOP=1"],
  160. stdin=f,
  161. capture_output=True,
  162. timeout=7200,
  163. )
  164. log = result.stderr.decode("utf-8", errors="replace").strip()
  165. if result.returncode != 0:
  166. raise RuntimeError(f"psql restore a échoué (code {result.returncode}) :\n{log}")
  167. log_lines.append(f"Dump restauré dans '{dbname}'.")
  168. if log:
  169. log_lines.append(log)
  170. return "\n".join(log_lines)
  171. def _read_backup_info(archive_path):
  172. from jobs.utils import sudo_read_backup_info
  173. return sudo_read_backup_info(archive_path)
  174. def _write_tar(tmpdir, dump_path, dbname, archive_name, backup_dir, job, instance, instance_url):
  175. """Crée le .tar dans tmpdir puis le copie dans backup_dir via sudo rsync."""
  176. import json as _json
  177. from jobs.utils import sudo_getsize
  178. # backup_info.json embarqué dans le tar
  179. info = {
  180. "instance_name": instance,
  181. "instance_url": instance_url,
  182. "type": job.type,
  183. "database": dbname,
  184. "created_at": datetime.utcnow().isoformat(),
  185. "backupmanager_version": "1.0.0",
  186. }
  187. info_path = os.path.join(tmpdir, "backup_info.json")
  188. with open(info_path, "w") as f:
  189. _json.dump(info, f, indent=2)
  190. # Créer le tar dans tmpdir (accessible par backupmanager)
  191. tmp_archive = os.path.join(tmpdir, archive_name + ".tar")
  192. with tarfile.open(tmp_archive, "w") as tar:
  193. tar.add(dump_path, arcname=f"db/{dbname}.sql")
  194. tar.add(info_path, arcname="backup_info.json")
  195. # Copier vers backup_dir via sudo rsync (backup_dir est 750 root)
  196. archive_path = os.path.join(backup_dir, archive_name + ".tar")
  197. result = subprocess.run(
  198. ["sudo", "rsync", tmp_archive, archive_path],
  199. capture_output=True, text=True,
  200. )
  201. if result.returncode != 0:
  202. raise RuntimeError(f"Copie de l'archive échouée : {result.stderr.strip()}")
  203. # .info.json YunoHost dans tmpdir puis copie via sudo rsync
  204. size = sudo_getsize(archive_path)
  205. ynh_info = {
  206. "created_at": int(time.time()),
  207. "description": f"BackupManager: {job.type} {dbname}",
  208. "size": size,
  209. "from_before_upgrade": False,
  210. "apps": {},
  211. "system": {},
  212. }
  213. tmp_ynh_info = os.path.join(tmpdir, archive_name + ".info.json")
  214. with open(tmp_ynh_info, "w") as f:
  215. _json.dump(ynh_info, f, indent=2)
  216. subprocess.run(
  217. ["sudo", "rsync", tmp_ynh_info,
  218. os.path.join(backup_dir, archive_name + ".info.json")],
  219. capture_output=True,
  220. )