| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990 |
import os
import re
from datetime import datetime, timedelta, timezone
def apply_retention(job, new_archive_name, backup_dir):
    """Apply the job's retention policy after a successful backup.

    Args:
        job: backup job exposing ``retention_mode`` ("count" or "daily")
            and ``retention_value``.
        new_archive_name: name of the archive just created (caller-side
            bookkeeping; not used to select deletions here).
        backup_dir: directory holding the archives and their sidecars.

    Returns:
        List of file names (relative to ``backup_dir``) actually deleted.
    """
    archives = _list_archives_for_job(job, backup_dir)
    if job.retention_mode == "count":
        to_delete = _retention_count(archives, job.retention_value)
    elif job.retention_mode == "daily":
        to_delete = _retention_daily(archives, job.retention_value)
    else:
        # Unknown retention mode: keep everything.
        return []

    deleted = []
    for archive_filename in to_delete:
        base = os.path.splitext(archive_filename)[0]
        # Delete the archive and its metadata sidecar together.
        for ext in (".tar", ".info.json"):
            full = os.path.join(backup_dir, base + ext)
            # EAFP: avoids the exists()/remove() race if something else
            # removes the file between the check and the unlink.
            try:
                os.remove(full)
            except FileNotFoundError:
                continue
            deleted.append(base + ext)
    return deleted
def _list_archives_for_job(job, backup_dir):
    """List this job's archives in *backup_dir*, oldest first.

    The file-name prefix is derived from the Flask ``INSTANCE_NAME`` and
    the job's type/config; only ``*.tar`` files matching that prefix are
    returned, sorted by the date embedded in the name.
    """
    import json

    from flask import current_app

    instance = current_app.config["INSTANCE_NAME"]
    if job.type == "ynh_app":
        cfg = json.loads(job.config_json or "{}")
        prefix = f"{instance}_{cfg.get('app_id', '')}_"
    elif job.type == "ynh_system":
        prefix = f"{instance}_system_"
    elif job.type in ("mysql", "postgresql"):
        cfg = json.loads(job.config_json or "{}")
        prefix = f"{instance}_{job.type}_{cfg.get('database', '')}_"
    elif job.type == "custom_dir":
        # Slugify the job name: lowercase, runs of non-alphanumerics
        # collapsed to "-" (re comes from the module-level import).
        label = re.sub(r'[^a-z0-9]+', '-', job.name.lower().strip()).strip('-')
        prefix = f"{instance}_{label}_"
    else:
        prefix = f"{instance}_{job.name.lower().replace(' ', '-')}_"

    archives = [
        fname
        for fname in os.listdir(backup_dir)
        if fname.startswith(prefix) and fname.endswith(".tar")
    ]
    archives.sort(key=_extract_date)
    return archives
- def _extract_date(filename):
- match = re.search(r'(\d{8})', filename)
- if match:
- try:
- return datetime.strptime(match.group(1), "%Y%m%d")
- except ValueError:
- pass
- return datetime.min
- def _retention_count(archives, keep_n):
- if len(archives) <= keep_n:
- return []
- return archives[: len(archives) - keep_n]
def _retention_daily(archives, days):
    """Keep one archive per calendar day within the last *days* days.

    Archives dated before the cutoff are always selected for deletion.
    Inside the window, the newest archive of each day is kept: the input
    is sorted oldest-first, so iterating in reverse visits newest first.

    Returns:
        Archives to delete, newest first.
    """
    # Naive UTC "now" so comparisons stay valid against the naive dates
    # returned by _extract_date (datetime.utcnow() is deprecated since 3.12).
    now = datetime.now(timezone.utc).replace(tzinfo=None)
    cutoff = now - timedelta(days=days)
    to_delete = []
    seen_dates = set()
    for archive in reversed(archives):
        date = _extract_date(archive)
        if date < cutoff:
            to_delete.append(archive)
            continue
        date_key = date.date()
        if date_key in seen_dates:
            # A newer archive for this day was already kept.
            to_delete.append(archive)
        else:
            seen_dates.add(date_key)
    return to_delete
|