# api.py — REST API blueprint for BackupManager
import glob
import hashlib
import hmac
import math
import os
import shutil
import subprocess
import uuid
from datetime import datetime, timezone

from flask import (
    Blueprint,
    Response,
    current_app,
    jsonify,
    request,
    stream_with_context,
)

from db import db, Job, Run, Upload
from helpers import read_archive_info
  19. bp = Blueprint("api", __name__, url_prefix="/api/v1")
  20. @bp.before_request
  21. def _check_api_auth():
  22. if request.endpoint == "api.api_health":
  23. return
  24. token = request.headers.get("X-BackupManager-Key", "")
  25. if token != current_app.config["API_TOKEN"]:
  26. return jsonify({"error": "Unauthorized"}), 401
  27. # --- Santé / jobs -------------------------------------------------------------
  28. @bp.route("/health")
  29. def api_health():
  30. return jsonify({"status": "ok", "instance": current_app.config.get("INSTANCE_NAME")})
  31. @bp.route("/jobs")
  32. def api_jobs():
  33. jobs = Job.query.all()
  34. return jsonify([
  35. {
  36. "id": j.id,
  37. "name": j.name,
  38. "type": j.type,
  39. "cron_expr": j.cron_expr,
  40. "enabled": j.enabled,
  41. "retention_mode": j.retention_mode,
  42. "retention_value": j.retention_value,
  43. }
  44. for j in jobs
  45. ])
  46. @bp.route("/jobs/<int:job_id>/runs")
  47. def api_job_runs(job_id):
  48. runs = Run.query.filter_by(job_id=job_id).order_by(Run.started_at.desc()).limit(50).all()
  49. return jsonify([
  50. {
  51. "id": r.id,
  52. "started_at": r.started_at.isoformat() if r.started_at else None,
  53. "finished_at": r.finished_at.isoformat() if r.finished_at else None,
  54. "status": r.status,
  55. "archive_name": r.archive_name,
  56. "size_bytes": r.size_bytes,
  57. }
  58. for r in runs
  59. ])
  60. @bp.route("/jobs/<int:job_id>/run", methods=["POST"])
  61. def api_job_run(job_id):
  62. import threading
  63. job = db.get_or_404(Job, job_id)
  64. from scheduler import _execute_job
  65. threading.Thread(target=_execute_job, args=(job.id,), daemon=True).start()
  66. return jsonify({"status": "triggered", "job_id": job_id})
  67. # --- Archives -----------------------------------------------------------------
  68. @bp.route("/archives")
  69. def api_archives():
  70. backup_dir = current_app.config["YUNOHOST_BACKUP_DIR"]
  71. archives = []
  72. try:
  73. from jobs.utils import sudo_listdir, sudo_getsize, sudo_getmtime
  74. for fname in sorted(sudo_listdir(backup_dir)):
  75. if fname.endswith(".tar"):
  76. path = os.path.join(backup_dir, fname)
  77. archives.append({
  78. "name": fname[:-4],
  79. "size_bytes": sudo_getsize(path),
  80. "modified_at": datetime.utcfromtimestamp(sudo_getmtime(path)).isoformat(),
  81. })
  82. except OSError:
  83. pass
  84. return jsonify(archives)
  85. @bp.route("/archives/<name>", methods=["DELETE"])
  86. def api_archive_delete(name):
  87. backup_dir = current_app.config["YUNOHOST_BACKUP_DIR"]
  88. from jobs.utils import sudo_exists
  89. for ext in (".tar", ".info.json"):
  90. path = os.path.join(backup_dir, name + ext)
  91. if sudo_exists(path):
  92. subprocess.run(["sudo", "rm", "-f", path], capture_output=True)
  93. return jsonify({"status": "deleted", "name": name})
  94. @bp.route("/archives/<name>/info")
  95. def api_archive_info(name):
  96. backup_dir = current_app.config["YUNOHOST_BACKUP_DIR"]
  97. return jsonify(read_archive_info(name, backup_dir))
  98. @bp.route("/archives/<name>/restore", methods=["POST"])
  99. def api_archive_restore(name):
  100. from blueprints.jobs import _start_restore
  101. restore_run_id, _ = _start_restore(name)
  102. return jsonify({"status": "started", "run_id": restore_run_id})
  103. @bp.route("/archives/<name>/restore/status")
  104. def api_archive_restore_status(name):
  105. run = (Run.query
  106. .filter(Run.archive_name == name, Run.log_text.like("[RESTAURATION%"))
  107. .order_by(Run.started_at.desc())
  108. .first())
  109. if not run:
  110. return jsonify({"error": "Aucune restauration trouvée pour cette archive."}), 404
  111. return jsonify({
  112. "status": run.status,
  113. "log": run.log_text,
  114. "started_at": run.started_at.isoformat() if run.started_at else None,
  115. "finished_at": run.finished_at.isoformat() if run.finished_at else None,
  116. })
  117. @bp.route("/summary")
  118. def api_summary():
  119. jobs = Job.query.all()
  120. result = []
  121. for job in jobs:
  122. last_run = (Run.query.filter_by(job_id=job.id)
  123. .order_by(Run.started_at.desc()).first())
  124. result.append({
  125. "id": job.id,
  126. "name": job.name,
  127. "type": job.type,
  128. "cron_expr": job.cron_expr,
  129. "enabled": job.enabled,
  130. "last_run": {
  131. "id": last_run.id,
  132. "started_at": last_run.started_at.isoformat() if last_run.started_at else None,
  133. "status": last_run.status,
  134. "archive_name": last_run.archive_name,
  135. "size_bytes": last_run.size_bytes,
  136. } if last_run else None,
  137. })
  138. return jsonify({"instance": current_app.config.get("INSTANCE_NAME"), "jobs": result})
  139. # --- Téléchargement archives --------------------------------------------------
  140. @bp.route("/archives/<name>/info-json-download")
  141. def api_archive_info_json_download(name):
  142. from jobs.utils import sudo_exists
  143. backup_dir = current_app.config["YUNOHOST_BACKUP_DIR"]
  144. info_path = os.path.join(backup_dir, name + ".info.json")
  145. if not sudo_exists(info_path):
  146. return jsonify({"error": "info.json introuvable"}), 404
  147. tmp_path = f"/tmp/backupmanager_dl_{name}.info.json"
  148. content = None
  149. try:
  150. result = subprocess.run(["sudo", "rsync", info_path, tmp_path],
  151. capture_output=True, text=True)
  152. if result.returncode != 0:
  153. return jsonify({"error": result.stderr.strip()}), 500
  154. with open(tmp_path, "rb") as f:
  155. content = f.read()
  156. except Exception as exc:
  157. return jsonify({"error": str(exc)}), 500
  158. finally:
  159. subprocess.run(["sudo", "rm", "-rf", tmp_path], capture_output=True)
  160. return Response(content, mimetype="application/json")
  161. @bp.route("/archives/<name>/download")
  162. def api_archive_download(name):
  163. from jobs.utils import sudo_exists
  164. backup_dir = current_app.config["YUNOHOST_BACKUP_DIR"]
  165. archive_path = os.path.join(backup_dir, name + ".tar")
  166. if not sudo_exists(archive_path):
  167. return jsonify({"error": "archive introuvable"}), 404
  168. tmp_path = f"/tmp/backupmanager_dl_{name}.tar"
  169. try:
  170. result = subprocess.run(
  171. ["sudo", "rsync", archive_path, tmp_path],
  172. capture_output=True, text=True, timeout=3600,
  173. )
  174. if result.returncode != 0:
  175. return jsonify({"error": result.stderr.strip()}), 500
  176. def stream_and_cleanup():
  177. try:
  178. with open(tmp_path, "rb") as f:
  179. while True:
  180. chunk = f.read(1024 * 1024)
  181. if not chunk:
  182. break
  183. yield chunk
  184. finally:
  185. if os.path.exists(tmp_path):
  186. os.unlink(tmp_path)
  187. return Response(
  188. stream_with_context(stream_and_cleanup()),
  189. mimetype="application/octet-stream",
  190. headers={"Content-Disposition": f'attachment; filename="{name}.tar"'},
  191. )
  192. except Exception as exc:
  193. if os.path.exists(tmp_path):
  194. os.unlink(tmp_path)
  195. return jsonify({"error": str(exc)}), 500
  196. # --- Upload chunked -----------------------------------------------------------
  197. @bp.route("/archives/upload/start", methods=["POST"])
  198. def api_upload_start():
  199. data = request.get_json(force=True) or {}
  200. filename = data.get("filename", "")
  201. total_size = int(data.get("total_size", 0))
  202. chunk_size = int(data.get("chunk_size", 50 * 1024 * 1024))
  203. chunks_total = int(data.get("chunks_total",
  204. math.ceil(total_size / chunk_size) if chunk_size else 1))
  205. checksum = data.get("checksum", "")
  206. if not filename:
  207. return jsonify({"error": "filename requis"}), 400
  208. upload_id = str(uuid.uuid4())
  209. upload = Upload(
  210. upload_id=upload_id,
  211. filename=filename,
  212. total_size=total_size,
  213. chunk_size=chunk_size,
  214. chunks_total=chunks_total,
  215. chunks_received=0,
  216. checksum=checksum,
  217. status="pending",
  218. )
  219. db.session.add(upload)
  220. db.session.commit()
  221. return jsonify({"upload_id": upload_id, "chunks_total": chunks_total})
  222. @bp.route("/archives/upload/<upload_id>/chunk/<int:n>", methods=["POST"])
  223. def api_upload_chunk(upload_id, n):
  224. upload = db.get_or_404(Upload, upload_id)
  225. if upload.status == "complete":
  226. return jsonify({"error": "upload déjà terminé"}), 400
  227. tmp_dir = os.path.join(current_app.config["DATA_DIR"], "uploads", upload_id)
  228. os.makedirs(tmp_dir, exist_ok=True)
  229. chunk_path = os.path.join(tmp_dir, f"chunk_{n:06d}")
  230. with open(chunk_path, "wb") as f:
  231. f.write(request.data)
  232. upload.chunks_received = (upload.chunks_received or 0) + 1
  233. upload.status = "in_progress"
  234. db.session.commit()
  235. return jsonify({"chunk": n, "received": upload.chunks_received})
  236. @bp.route("/archives/upload/<upload_id>/finish", methods=["POST"])
  237. def api_upload_finish(upload_id):
  238. upload = db.get_or_404(Upload, upload_id)
  239. tmp_dir = os.path.join(current_app.config["DATA_DIR"], "uploads", upload_id)
  240. backup_dir = current_app.config["YUNOHOST_BACKUP_DIR"]
  241. chunk_files = sorted(glob.glob(os.path.join(tmp_dir, "chunk_*")))
  242. if not chunk_files:
  243. return jsonify({"error": "aucun chunk reçu"}), 400
  244. tmp_archive = os.path.join(tmp_dir, upload.filename)
  245. sha256 = hashlib.sha256()
  246. with open(tmp_archive, "wb") as out:
  247. for chunk_file in chunk_files:
  248. with open(chunk_file, "rb") as f:
  249. data = f.read()
  250. out.write(data)
  251. sha256.update(data)
  252. if upload.checksum and sha256.hexdigest() != upload.checksum:
  253. upload.status = "error"
  254. db.session.commit()
  255. shutil.rmtree(tmp_dir, ignore_errors=True)
  256. return jsonify({"error": "checksum invalide"}), 400
  257. dest_path = os.path.join(backup_dir, upload.filename)
  258. result = subprocess.run(
  259. ["sudo", "rsync", tmp_archive, dest_path],
  260. capture_output=True, text=True,
  261. )
  262. if result.returncode != 0:
  263. upload.status = "error"
  264. db.session.commit()
  265. shutil.rmtree(tmp_dir, ignore_errors=True)
  266. return jsonify({"error": result.stderr.strip()}), 500
  267. data = request.get_json(silent=True) or {}
  268. info_json_str = data.get("info_json")
  269. if info_json_str:
  270. archive_base = upload.filename[:-4] if upload.filename.endswith(".tar") else upload.filename
  271. tmp_info = os.path.join(tmp_dir, archive_base + ".info.json")
  272. with open(tmp_info, "w") as f:
  273. f.write(info_json_str)
  274. subprocess.run(
  275. ["sudo", "rsync", tmp_info,
  276. os.path.join(backup_dir, archive_base + ".info.json")],
  277. capture_output=True,
  278. )
  279. shutil.rmtree(tmp_dir, ignore_errors=True)
  280. upload.status = "complete"
  281. db.session.commit()
  282. return jsonify({"status": "complete", "filename": upload.filename})
  283. @bp.route("/archives/upload/<upload_id>", methods=["DELETE"])
  284. def api_upload_cancel(upload_id):
  285. upload = db.get_or_404(Upload, upload_id)
  286. tmp_dir = os.path.join(current_app.config["DATA_DIR"], "uploads", upload_id)
  287. shutil.rmtree(tmp_dir, ignore_errors=True)
  288. db.session.delete(upload)
  289. db.session.commit()
  290. return jsonify({"status": "cancelled"})