|
|
@@ -785,6 +785,31 @@ def api_upload_finish(upload_id):
|
|
|
return jsonify({"status": "complete", "filename": upload.filename})
|
|
|
|
|
|
|
|
|
@app.route("/api/v1/archives/<name>/info-json-download")
def api_archive_info_json_download(name):
    """Serve an archive's .info.json, fetched via sudo rsync (inter-instance pull).

    The YunoHost backup dir is root-owned, so the file is first copied into a
    private temp directory with ``sudo rsync`` before being read back and
    returned as raw JSON.

    Returns:
        200 with the JSON bytes, 404 if the .info.json does not exist, or
        500 with the rsync stderr / exception message on failure.
    """
    import shutil
    import tempfile

    from flask import Response

    from jobs.utils import sudo_exists

    backup_dir = app.config["YUNOHOST_BACKUP_DIR"]
    info_path = os.path.join(backup_dir, name + ".info.json")
    if not sudo_exists(info_path):
        return jsonify({"error": "info.json introuvable"}), 404

    # Stage into a freshly created private directory instead of a fixed,
    # guessable /tmp path derived from the URL parameter: avoids clashes and
    # symlink tricks in the shared /tmp, and lets us delete the root-owned
    # rsync output (we own the directory, so rmtree works even in sticky /tmp).
    staging_dir = tempfile.mkdtemp(prefix="backupmanager_dl_")
    tmp_path = os.path.join(staging_dir, "archive.info.json")
    try:
        result = subprocess.run(["sudo", "rsync", info_path, tmp_path],
                                capture_output=True, text=True)
        if result.returncode != 0:
            return jsonify({"error": result.stderr.strip()}), 500
        with open(tmp_path, "rb") as f:
            content = f.read()
        return Response(content, mimetype="application/json")
    except Exception as exc:
        return jsonify({"error": str(exc)}), 500
    finally:
        # Runs on every path, including the early 500 return above — the
        # original leaked the temp file in that case.
        shutil.rmtree(staging_dir, ignore_errors=True)
|
|
|
@app.route("/api/v1/archives/<name>/download")
|
|
|
def api_archive_download(name):
|
|
|
"""Téléchargement d'une archive via sudo rsync vers /tmp (pour pull inter-instances)."""
|
|
|
@@ -956,11 +981,11 @@ def archive_push(archive_name, inst_id):
|
|
|
return redirect(request.referrer or url_for("index"))
|
|
|
|
|
|
|
|
|
@app.route("/remote-instances/<int:inst_id>/pull-latest/<int:remote_job_id>", methods=["POST"])
def archive_pull_latest(inst_id, remote_job_id):
    """Kick off a background pull of a remote job's latest archive.

    Responds immediately: the actual transfer happens in a daemon thread
    (see ``_do_pull_latest``) while the user is sent back to the network
    dashboard with a confirmation flash.
    """
    inst = db.get_or_404(RemoteInstance, inst_id)
    worker = threading.Thread(
        target=_do_pull_latest,
        args=(inst.id, remote_job_id),
        daemon=True,
    )
    worker.start()
    flash(f"Rapatriement depuis « {inst.name} » démarré en arrière-plan.", "success")
    return redirect(url_for("dashboard_network"))
|
|
|
|
|
|
|
|
|
@@ -1028,27 +1053,51 @@ def _do_push_archive(archive_name, inst_id):
|
|
|
os.unlink(tmp_path)
|
|
|
|
|
|
|
|
|
def _stage_bytes_to_backup_dir(content, backup_dir, dest_name):
    """Write *content* to a temp file, then sudo-rsync it to backup_dir/dest_name.

    The temp file is removed in all cases (the write-then-rsync-then-unlink
    sequence was previously duplicated inline and leaked the temp file when
    rsync failed).
    """
    tmp_path = f"/tmp/backupmanager_pull_{dest_name}"
    try:
        with open(tmp_path, "wb") as f:
            f.write(content)
        subprocess.run(["sudo", "rsync", tmp_path,
                        os.path.join(backup_dir, dest_name)], check=True)
    finally:
        if os.path.exists(tmp_path):
            os.unlink(tmp_path)


def _do_pull_latest(inst_id, remote_job_id):
    """Pull the latest archive of a remote job (.tar + .info.json).

    Runs in a background daemon thread: syncs the remote instance, resolves
    the most recent run's archive name via the federation API, downloads the
    .tar (and the .info.json when available), and stages each file into the
    local YunoHost backup dir with ``sudo rsync``. Errors are logged, never
    raised (there is no caller to propagate them to).
    """
    from federation.client import FederationClient, sync_instance

    with app.app_context():
        inst = db.session.get(RemoteInstance, inst_id)
        if inst is None:
            # The instance may have been deleted between the HTTP request and
            # the thread actually running; the original code would have
            # crashed on `inst.name` inside its own except handler here.
            app.logger.error(f"Pull impossible : instance {inst_id} introuvable.")
            return
        backup_dir = app.config["YUNOHOST_BACKUP_DIR"]
        try:
            client = FederationClient(inst)

            # Sync first so our local view includes the remote's latest runs.
            sync_instance(inst)
            db.session.refresh(inst)

            # Resolve the most recent run of the remote job.
            runs = client.get_job_runs(remote_job_id)
            if not runs:
                raise RuntimeError(f"Aucun run distant pour le job {remote_job_id}")
            archive_name = runs[0].get("archive_name")
            if not archive_name:
                raise RuntimeError("Le dernier run distant n'a pas d'archive.")

            # Download and stage the .tar.
            _stage_bytes_to_backup_dir(client.download_archive(archive_name),
                                       backup_dir, archive_name + ".tar")

            # The .info.json is optional; stage it only when the remote has one.
            info_bytes = client.download_info_json(archive_name)
            if info_bytes:
                _stage_bytes_to_backup_dir(info_bytes, backup_dir,
                                           archive_name + ".info.json")

            app.logger.info(f"Pull {archive_name} ← {inst.name} OK")
        except Exception as exc:
            app.logger.error(f"Pull ← {inst.name} échoué : {exc}")
|
|
|
|
|
class _JobRow:
|