import glob
import hashlib
import math
import os
import shutil
import subprocess
import uuid
from datetime import datetime

from flask import (
    Blueprint,
    Response,
    current_app,
    jsonify,
    request,
    stream_with_context,
)

from db import db, Job, Run, Upload
from helpers import read_archive_info

bp = Blueprint("api", __name__, url_prefix="/api/v1")


@bp.before_request
def _check_api_auth():
    # Every endpoint except /health requires the API token header.
    if request.endpoint == "api.api_health":
        return
    token = request.headers.get("X-BackupManager-Key", "")
    if token != current_app.config["API_TOKEN"]:
        return jsonify({"error": "Unauthorized"}), 401
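

# Illustrative authenticated call (hypothetical host and token, shown only as a
# usage sketch; every route below except /health expects this header):
#
#   curl -H "X-BackupManager-Key: $API_TOKEN" https://backup.example.org/api/v1/jobs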


# --- Health / jobs --------------------------------------------------------------

@bp.route("/health")
def api_health():
    return jsonify({"status": "ok", "instance": current_app.config.get("INSTANCE_NAME")})


@bp.route("/jobs")
def api_jobs():
    jobs = Job.query.all()
    return jsonify([
        {
            "id": j.id,
            "name": j.name,
            "type": j.type,
            "cron_expr": j.cron_expr,
            "enabled": j.enabled,
            "retention_mode": j.retention_mode,
            "retention_value": j.retention_value,
        }
        for j in jobs
    ])


@bp.route("/jobs/<int:job_id>/runs")
def api_job_runs(job_id):
    runs = Run.query.filter_by(job_id=job_id).order_by(Run.started_at.desc()).limit(50).all()
    return jsonify([
        {
            "id": r.id,
            "started_at": r.started_at.isoformat() if r.started_at else None,
            "finished_at": r.finished_at.isoformat() if r.finished_at else None,
            "status": r.status,
            "archive_name": r.archive_name,
            "size_bytes": r.size_bytes,
        }
        for r in runs
    ])


@bp.route("/jobs/<int:job_id>/run", methods=["POST"])
def api_job_run(job_id):
    import threading

    from scheduler import _execute_job

    job = db.get_or_404(Job, job_id)
    # Run the job in a background thread so the request returns immediately.
    threading.Thread(target=_execute_job, args=(job.id,), daemon=True).start()
    return jsonify({"status": "triggered", "job_id": job_id})
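

# Usage sketch (hypothetical host and job id): trigger job 3, then list its runs.
#
#   curl -X POST -H "X-BackupManager-Key: $API_TOKEN" \
#        https://backup.example.org/api/v1/jobs/3/run
#   curl -H "X-BackupManager-Key: $API_TOKEN" \
#        https://backup.example.org/api/v1/jobs/3/runs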


# --- Archives --------------------------------------------------------------------

@bp.route("/archives")
def api_archives():
    backup_dir = current_app.config["YUNOHOST_BACKUP_DIR"]
    archives = []
    try:
        from jobs.utils import sudo_listdir, sudo_getsize, sudo_getmtime

        for fname in sorted(sudo_listdir(backup_dir)):
            if fname.endswith(".tar"):
                path = os.path.join(backup_dir, fname)
                archives.append({
                    "name": fname[:-4],
                    "size_bytes": sudo_getsize(path),
                    "modified_at": datetime.utcfromtimestamp(sudo_getmtime(path)).isoformat(),
                })
    except OSError:
        # An unreadable backup directory yields an empty list instead of a 500.
        pass
    return jsonify(archives)


@bp.route("/archives/<name>", methods=["DELETE"])
def api_archive_delete(name):
    backup_dir = current_app.config["YUNOHOST_BACKUP_DIR"]
    from jobs.utils import sudo_exists

    # Remove both the archive itself and its metadata file.
    for ext in (".tar", ".info.json"):
        path = os.path.join(backup_dir, name + ext)
        if sudo_exists(path):
            subprocess.run(["sudo", "rm", "-f", path], capture_output=True)
    return jsonify({"status": "deleted", "name": name})


@bp.route("/archives/<name>/info")
def api_archive_info(name):
    backup_dir = current_app.config["YUNOHOST_BACKUP_DIR"]
    return jsonify(read_archive_info(name, backup_dir))


@bp.route("/archives/<name>/restore", methods=["POST"])
def api_archive_restore(name):
    from blueprints.jobs import _start_restore

    restore_run_id, _ = _start_restore(name)
    return jsonify({"status": "started", "run_id": restore_run_id})


@bp.route("/archives/<name>/restore/status")
def api_archive_restore_status(name):
    # Most recent run whose log marks it as a restore of this archive.
    run = (Run.query
           .filter(Run.archive_name == name, Run.log_text.like("[RESTAURATION%"))
           .order_by(Run.started_at.desc())
           .first())
    if not run:
        return jsonify({"error": "Aucune restauration trouvée pour cette archive."}), 404
    return jsonify({
        "status": run.status,
        "log": run.log_text,
        "started_at": run.started_at.isoformat() if run.started_at else None,
        "finished_at": run.finished_at.isoformat() if run.finished_at else None,
    })
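

# Usage sketch (hypothetical host and archive name): start a restore, then poll
# its status until the run is finished.
#
#   curl -X POST -H "X-BackupManager-Key: $API_TOKEN" \
#        https://backup.example.org/api/v1/archives/my_archive/restore
#   curl -H "X-BackupManager-Key: $API_TOKEN" \
#        https://backup.example.org/api/v1/archives/my_archive/restore/status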


@bp.route("/summary")
def api_summary():
    jobs = Job.query.all()
    result = []
    for job in jobs:
        last_run = (Run.query.filter_by(job_id=job.id)
                    .order_by(Run.started_at.desc()).first())
        result.append({
            "id": job.id,
            "name": job.name,
            "type": job.type,
            "cron_expr": job.cron_expr,
            "enabled": job.enabled,
            "last_run": {
                "id": last_run.id,
                "started_at": last_run.started_at.isoformat() if last_run.started_at else None,
                "status": last_run.status,
                "archive_name": last_run.archive_name,
                "size_bytes": last_run.size_bytes,
            } if last_run else None,
        })
    return jsonify({"instance": current_app.config.get("INSTANCE_NAME"), "jobs": result})


# --- Archive downloads -----------------------------------------------------------

@bp.route("/archives/<name>/info-json-download")
def api_archive_info_json_download(name):
    from jobs.utils import sudo_exists

    backup_dir = current_app.config["YUNOHOST_BACKUP_DIR"]
    info_path = os.path.join(backup_dir, name + ".info.json")
    if not sudo_exists(info_path):
        return jsonify({"error": "info.json introuvable"}), 404
    # Copy the file to a location the app can read before serving it.
    tmp_path = f"/tmp/backupmanager_dl_{name}.info.json"
    content = None
    try:
        result = subprocess.run(["sudo", "rsync", info_path, tmp_path],
                                capture_output=True, text=True)
        if result.returncode != 0:
            return jsonify({"error": result.stderr.strip()}), 500
        with open(tmp_path, "rb") as f:
            content = f.read()
    except Exception as exc:
        return jsonify({"error": str(exc)}), 500
    finally:
        subprocess.run(["sudo", "rm", "-f", tmp_path], capture_output=True)
    return Response(content, mimetype="application/json")


@bp.route("/archives/<name>/download")
def api_archive_download(name):
    from jobs.utils import sudo_exists

    backup_dir = current_app.config["YUNOHOST_BACKUP_DIR"]
    archive_path = os.path.join(backup_dir, name + ".tar")
    if not sudo_exists(archive_path):
        return jsonify({"error": "archive introuvable"}), 404
    tmp_path = f"/tmp/backupmanager_dl_{name}.tar"
    try:
        # Copy the archive to a readable location, then stream it to the client.
        result = subprocess.run(
            ["sudo", "rsync", archive_path, tmp_path],
            capture_output=True, text=True, timeout=3600,
        )
        if result.returncode != 0:
            return jsonify({"error": result.stderr.strip()}), 500

        def stream_and_cleanup():
            # Yield the file in 1 MiB chunks and delete the copy when done.
            try:
                with open(tmp_path, "rb") as f:
                    while True:
                        chunk = f.read(1024 * 1024)
                        if not chunk:
                            break
                        yield chunk
            finally:
                if os.path.exists(tmp_path):
                    os.unlink(tmp_path)

        return Response(
            stream_with_context(stream_and_cleanup()),
            mimetype="application/octet-stream",
            headers={"Content-Disposition": f'attachment; filename="{name}.tar"'},
        )
    except Exception as exc:
        if os.path.exists(tmp_path):
            os.unlink(tmp_path)
        return jsonify({"error": str(exc)}), 500
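

# Usage sketch (hypothetical host and archive name): download an archive to disk.
#
#   curl -H "X-BackupManager-Key: $API_TOKEN" -o my_archive.tar \
#        https://backup.example.org/api/v1/archives/my_archive/download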


# --- Chunked upload --------------------------------------------------------------

@bp.route("/archives/upload/start", methods=["POST"])
def api_upload_start():
    data = request.get_json(force=True) or {}
    filename = data.get("filename", "")
    total_size = int(data.get("total_size", 0))
    chunk_size = int(data.get("chunk_size", 50 * 1024 * 1024))
    chunks_total = int(data.get("chunks_total",
                                math.ceil(total_size / chunk_size) if chunk_size else 1))
    checksum = data.get("checksum", "")
    if not filename:
        return jsonify({"error": "filename requis"}), 400
    upload_id = str(uuid.uuid4())
    upload = Upload(
        upload_id=upload_id,
        filename=filename,
        total_size=total_size,
        chunk_size=chunk_size,
        chunks_total=chunks_total,
        chunks_received=0,
        checksum=checksum,
        status="pending",
    )
    db.session.add(upload)
    db.session.commit()
    return jsonify({"upload_id": upload_id, "chunks_total": chunks_total})


@bp.route("/archives/upload/<upload_id>/chunk/<int:n>", methods=["POST"])
def api_upload_chunk(upload_id, n):
    upload = db.get_or_404(Upload, upload_id)
    if upload.status == "complete":
        return jsonify({"error": "upload déjà terminé"}), 400
    tmp_dir = os.path.join(current_app.config["DATA_DIR"], "uploads", upload_id)
    os.makedirs(tmp_dir, exist_ok=True)
    # Zero-padded names keep the chunks in order when reassembled by glob + sort.
    chunk_path = os.path.join(tmp_dir, f"chunk_{n:06d}")
    with open(chunk_path, "wb") as f:
        f.write(request.data)
    upload.chunks_received = (upload.chunks_received or 0) + 1
    upload.status = "in_progress"
    db.session.commit()
    return jsonify({"chunk": n, "received": upload.chunks_received})


@bp.route("/archives/upload/<upload_id>/finish", methods=["POST"])
def api_upload_finish(upload_id):
    upload = db.get_or_404(Upload, upload_id)
    tmp_dir = os.path.join(current_app.config["DATA_DIR"], "uploads", upload_id)
    backup_dir = current_app.config["YUNOHOST_BACKUP_DIR"]
    chunk_files = sorted(glob.glob(os.path.join(tmp_dir, "chunk_*")))
    if not chunk_files:
        return jsonify({"error": "aucun chunk reçu"}), 400

    # Reassemble the chunks and compute the SHA-256 of the resulting archive.
    tmp_archive = os.path.join(tmp_dir, upload.filename)
    sha256 = hashlib.sha256()
    with open(tmp_archive, "wb") as out:
        for chunk_file in chunk_files:
            with open(chunk_file, "rb") as f:
                data = f.read()
            out.write(data)
            sha256.update(data)
    if upload.checksum and sha256.hexdigest() != upload.checksum:
        upload.status = "error"
        db.session.commit()
        shutil.rmtree(tmp_dir, ignore_errors=True)
        return jsonify({"error": "checksum invalide"}), 400

    # Move the reassembled archive into the YunoHost backup directory.
    dest_path = os.path.join(backup_dir, upload.filename)
    result = subprocess.run(
        ["sudo", "rsync", tmp_archive, dest_path],
        capture_output=True, text=True,
    )
    if result.returncode != 0:
        upload.status = "error"
        db.session.commit()
        shutil.rmtree(tmp_dir, ignore_errors=True)
        return jsonify({"error": result.stderr.strip()}), 500

    # An optional info_json payload is written alongside the archive.
    payload = request.get_json(silent=True) or {}
    info_json_str = payload.get("info_json")
    if info_json_str:
        archive_base = upload.filename[:-4] if upload.filename.endswith(".tar") else upload.filename
        tmp_info = os.path.join(tmp_dir, archive_base + ".info.json")
        with open(tmp_info, "w") as f:
            f.write(info_json_str)
        subprocess.run(
            ["sudo", "rsync", tmp_info,
             os.path.join(backup_dir, archive_base + ".info.json")],
            capture_output=True,
        )

    shutil.rmtree(tmp_dir, ignore_errors=True)
    upload.status = "complete"
    db.session.commit()
    return jsonify({"status": "complete", "filename": upload.filename})
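

# Client-side sketch of the chunked upload flow (illustrative only; `BASE`,
# `API_TOKEN`, and the use of the `requests` library are hypothetical, but the
# endpoints, field names, and header match the routes above):
#
#   import hashlib
#   import requests
#
#   CHUNK = 50 * 1024 * 1024
#   headers = {"X-BackupManager-Key": API_TOKEN}
#   payload = open("my_archive.tar", "rb").read()
#   start = requests.post(f"{BASE}/archives/upload/start", headers=headers, json={
#       "filename": "my_archive.tar",
#       "total_size": len(payload),
#       "chunk_size": CHUNK,
#       "checksum": hashlib.sha256(payload).hexdigest(),
#   }).json()
#   for n in range(start["chunks_total"]):
#       requests.post(f"{BASE}/archives/upload/{start['upload_id']}/chunk/{n}",
#                     headers=headers, data=payload[n * CHUNK:(n + 1) * CHUNK])
#   requests.post(f"{BASE}/archives/upload/{start['upload_id']}/finish", headers=headers)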


@bp.route("/archives/upload/<upload_id>", methods=["DELETE"])
def api_upload_cancel(upload_id):
    upload = db.get_or_404(Upload, upload_id)
    tmp_dir = os.path.join(current_app.config["DATA_DIR"], "uploads", upload_id)
    shutil.rmtree(tmp_dir, ignore_errors=True)
    db.session.delete(upload)
    db.session.commit()
    return jsonify({"status": "cancelled"})