import json
import os
import subprocess
import tarfile
import tempfile
import time
from datetime import datetime


def run_db_dump(job, instance, backup_dir):
    """Common entry point for mysql and postgresql dump jobs.

    Dispatches on ``job.type`` and returns ``(archive_name, log)``.
    Raises ValueError for an unknown job type.
    """
    if job.type == "mysql":
        return _run_mysql(job, instance, backup_dir)
    if job.type == "postgresql":
        return _run_postgresql(job, instance, backup_dir)
    raise ValueError(f"Type inconnu pour db_dump : {job.type}")


# ---------------------------------------------------------------------------
# MySQL
# ---------------------------------------------------------------------------


def _run_mysql(job, instance, backup_dir):
    """Dump a MySQL database with mysqldump and package it as a tar archive.

    Returns ``(archive_name, log)``.  Raises ValueError when the job config
    is missing a database name, RuntimeError when mysqldump fails.
    """
    from flask import current_app

    config = json.loads(job.config_json or "{}")
    database = config.get("database", "")
    if not database:
        raise ValueError("Nom de base de données manquant dans la configuration du job.")

    archive_name = _archive_name(instance, "mysql", database, backup_dir)

    with tempfile.TemporaryDirectory() as workdir:
        sql_file = os.path.join(workdir, f"{database}.sql")
        # --single-transaction gives a consistent snapshot for InnoDB without
        # locking; routines/triggers are included so the dump is complete.
        command = [
            "sudo",
            "mysqldump",
            "--single-transaction",
            "--routines",
            "--triggers",
            "--result-file",
            sql_file,
            database,
        ]
        proc = subprocess.run(command, capture_output=True, text=True, timeout=7200)
        output = (proc.stdout + proc.stderr).strip()
        if proc.returncode != 0:
            raise RuntimeError(f"mysqldump a échoué (code {proc.returncode}) :\n{output}")
        _write_tar(
            workdir,
            sql_file,
            database,
            archive_name,
            backup_dir,
            job,
            instance,
            current_app.config.get("INSTANCE_URL", ""),
        )
        return archive_name, output or "mysqldump terminé sans sortie."
# --------------------------------------------------------------------------- # PostgreSQL # --------------------------------------------------------------------------- def _run_postgresql(job, instance, backup_dir): from flask import current_app cfg = json.loads(job.config_json or "{}") dbname = cfg.get("database", "") if not dbname: raise ValueError("Nom de base de données manquant dans la configuration du job.") archive_name = _archive_name(instance, "postgresql", dbname, backup_dir) with tempfile.TemporaryDirectory() as tmpdir: dump_path = os.path.join(tmpdir, f"{dbname}.sql") # pg_dump doit tourner en tant qu'utilisateur postgres result = subprocess.run( ["sudo", "-u", "postgres", "pg_dump", "--format=plain", dbname], capture_output=True, timeout=7200, ) if result.returncode != 0: log = result.stderr.decode("utf-8", errors="replace").strip() raise RuntimeError(f"pg_dump a échoué (code {result.returncode}) :\n{log}") with open(dump_path, "wb") as f: f.write(result.stdout) log = result.stderr.decode("utf-8", errors="replace").strip() _write_tar(tmpdir, dump_path, dbname, archive_name, backup_dir, job, instance, current_app.config.get("INSTANCE_URL", "")) return archive_name, log or "pg_dump terminé sans sortie." 
# --------------------------------------------------------------------------- # Helpers partagés # --------------------------------------------------------------------------- def _archive_name(instance, db_type, dbname, backup_dir): from jobs.utils import unique_archive_name date_str = datetime.utcnow().strftime("%Y%m%d") return unique_archive_name(f"{instance}_{db_type}_{dbname}_{date_str}", backup_dir) # --------------------------------------------------------------------------- # Restore # --------------------------------------------------------------------------- def restore_db_dump(archive_name, backup_dir): """Restauration d'une base MySQL ou PostgreSQL depuis une archive BackupManager.""" archive_path = os.path.join(backup_dir, archive_name + ".tar") from jobs.utils import sudo_exists if not sudo_exists(archive_path): raise FileNotFoundError(f"Archive introuvable : {archive_path}") info = _read_backup_info(archive_path) db_type = info.get("type") dbname = info.get("database", "") if not dbname: raise ValueError("Nom de base de données introuvable dans backup_info.json.") with tempfile.TemporaryDirectory() as tmpdir: dump_path = os.path.join(tmpdir, f"{dbname}.sql") # Extraction du dump depuis l'archive with tarfile.open(archive_path) as tar: member = tar.getmember(f"db/{dbname}.sql") with tar.extractfile(member) as src, open(dump_path, "wb") as dst: dst.write(src.read()) if db_type == "mysql": return _restore_mysql(dbname, dump_path) elif db_type == "postgresql": return _restore_postgresql(dbname, dump_path) else: raise ValueError(f"Type de base inconnu dans l'archive : {db_type}") def _restore_mysql(dbname, dump_path): log_lines = [] # Suppression + recréation propre de la base result = subprocess.run( ["sudo", "mysql", "-e", f"DROP DATABASE IF EXISTS `{dbname}`; CREATE DATABASE `{dbname}` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;"], capture_output=True, text=True, timeout=60, ) if result.returncode != 0: raise RuntimeError(f"Impossible de recréer 
la base MySQL '{dbname}' :\n{result.stderr.strip()}") log_lines.append(f"Base MySQL '{dbname}' recréée.") # Restauration du dump with open(dump_path, "rb") as f: result = subprocess.run( ["sudo", "mysql", dbname], stdin=f, capture_output=True, timeout=7200, ) log = result.stderr.decode("utf-8", errors="replace").strip() if result.returncode != 0: raise RuntimeError(f"mysql restore a échoué (code {result.returncode}) :\n{log}") log_lines.append(f"Dump restauré dans '{dbname}'.") if log: log_lines.append(log) return "\n".join(log_lines) def _restore_postgresql(dbname, dump_path): log_lines = [] # Terminer les connexions actives puis drop + recreate subprocess.run( ["sudo", "-u", "postgres", "psql", "-c", f"SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = '{dbname}' AND pid <> pg_backend_pid();"], capture_output=True, timeout=30, ) subprocess.run( ["sudo", "-u", "postgres", "dropdb", "--if-exists", dbname], capture_output=True, timeout=60, ) result = subprocess.run( ["sudo", "-u", "postgres", "createdb", dbname], capture_output=True, text=True, timeout=60, ) if result.returncode != 0: raise RuntimeError(f"Impossible de recréer la base PostgreSQL '{dbname}' :\n{result.stderr.strip()}") log_lines.append(f"Base PostgreSQL '{dbname}' recréée.") # Restauration du dump with open(dump_path, "rb") as f: result = subprocess.run( ["sudo", "-u", "postgres", "psql", "-d", dbname, "-v", "ON_ERROR_STOP=1"], stdin=f, capture_output=True, timeout=7200, ) log = result.stderr.decode("utf-8", errors="replace").strip() if result.returncode != 0: raise RuntimeError(f"psql restore a échoué (code {result.returncode}) :\n{log}") log_lines.append(f"Dump restauré dans '{dbname}'.") if log: log_lines.append(log) return "\n".join(log_lines) def _read_backup_info(archive_path): try: with tarfile.open(archive_path) as tar: member = tar.extractfile("backup_info.json") if member: return json.loads(member.read()) except Exception: pass return {} def _write_tar(tmpdir, dump_path, 
dbname, archive_name, backup_dir, job, instance, instance_url): """Crée le .tar dans tmpdir puis le copie dans backup_dir via sudo rsync.""" import json as _json from jobs.utils import sudo_getsize # backup_info.json embarqué dans le tar info = { "instance_name": instance, "instance_url": instance_url, "type": job.type, "database": dbname, "created_at": datetime.utcnow().isoformat(), "backupmanager_version": "1.0.0", } info_path = os.path.join(tmpdir, "backup_info.json") with open(info_path, "w") as f: _json.dump(info, f, indent=2) # Créer le tar dans tmpdir (accessible par backupmanager) tmp_archive = os.path.join(tmpdir, archive_name + ".tar") with tarfile.open(tmp_archive, "w") as tar: tar.add(dump_path, arcname=f"db/{dbname}.sql") tar.add(info_path, arcname="backup_info.json") # Copier vers backup_dir via sudo rsync (backup_dir est 750 root) archive_path = os.path.join(backup_dir, archive_name + ".tar") result = subprocess.run( ["sudo", "rsync", tmp_archive, archive_path], capture_output=True, text=True, ) if result.returncode != 0: raise RuntimeError(f"Copie de l'archive échouée : {result.stderr.strip()}") # .info.json YunoHost dans tmpdir puis copie via sudo rsync size = sudo_getsize(archive_path) ynh_info = { "created_at": int(time.time()), "description": f"BackupManager: {job.type} {dbname}", "size": size, "from_before_upgrade": False, "apps": {}, "system": {}, } tmp_ynh_info = os.path.join(tmpdir, archive_name + ".info.json") with open(tmp_ynh_info, "w") as f: _json.dump(ynh_info, f, indent=2) subprocess.run( ["sudo", "rsync", tmp_ynh_info, os.path.join(backup_dir, archive_name + ".info.json")], capture_output=True, )