|
@@ -1,7 +1,13 @@
|
|
|
|
|
+import glob
|
|
|
|
|
+import hashlib
|
|
|
import json
|
|
import json
|
|
|
import logging
|
|
import logging
|
|
|
|
|
+import math
|
|
|
import os
|
|
import os
|
|
|
|
|
+import shutil
|
|
|
import subprocess
|
|
import subprocess
|
|
|
|
|
+import threading
|
|
|
|
|
+import uuid
|
|
|
from datetime import datetime
|
|
from datetime import datetime
|
|
|
|
|
|
|
|
from flask import (
|
|
from flask import (
|
|
@@ -43,7 +49,7 @@ logging.basicConfig(
|
|
|
|
|
|
|
|
# --- Extensions --------------------------------------------------------------
|
|
# --- Extensions --------------------------------------------------------------
|
|
|
|
|
|
|
|
-from db import db, Job, Run, Destination, Setting
|
|
|
|
|
|
|
+from db import db, Job, Run, Destination, Setting, RemoteInstance, RemoteRun, Upload
|
|
|
|
|
|
|
|
db.init_app(app)
|
|
db.init_app(app)
|
|
|
|
|
|
|
@@ -167,15 +173,59 @@ def job_history(job_id):
|
|
|
return render_template("job_history.html", job=job, runs=runs)
|
|
return render_template("job_history.html", job=job, runs=runs)
|
|
|
|
|
|
|
|
|
|
|
|
|
-@app.route("/archives/<path:archive_name>/restore", methods=["GET", "POST"])
|
|
|
|
|
-def archive_restore(archive_name):
|
|
|
|
|
|
|
def _do_restore_job(archive_name, archive_type, restore_run_id):
    """Run a restore in the background and update its tracking Run.

    Executed in a daemon thread (see ``_start_restore``); pushes an app
    context so the DB session is usable outside a request.

    Args:
        archive_name: backup archive to restore (name without extension).
        archive_type: one of "custom_dir", "mysql", "postgresql",
            "ynh_app", "ynh_system"; any other value is recorded as an
            error on the Run.
        restore_run_id: id of the Run row tracking this restore, or None
            when no Run could be created (the restore still proceeds,
            just untracked).
    """
    with app.app_context():
        run = db.session.get(Run, restore_run_id) if restore_run_id else None
        try:
            backup_dir = app.config["YUNOHOST_BACKUP_DIR"]
            if archive_type == "custom_dir":
                from jobs.custom_dir import restore_custom_dir
                log = restore_custom_dir(archive_name, backup_dir)
            elif archive_type in ("mysql", "postgresql"):
                from jobs.db_dump import restore_db_dump
                log = restore_db_dump(archive_name, backup_dir)
            elif archive_type in ("ynh_app", "ynh_system"):
                # Both YunoHost flavours run the exact same command; only
                # the scope flag differs, so the two branches are merged.
                scope = "--apps" if archive_type == "ynh_app" else "--system"
                result = subprocess.run(
                    ["sudo", "yunohost", "backup", "restore", archive_name,
                     scope, "--force"],
                    capture_output=True, text=True, timeout=3600,
                )
                log = (result.stdout + result.stderr).strip()
                if result.returncode != 0:
                    raise RuntimeError(f"yunohost backup restore a échoué :\n{log}")
            else:
                raise NotImplementedError(
                    f"Restauration non supportée pour le type '{archive_type}'."
                )
            if run:
                run.status = "success"
                run.finished_at = datetime.utcnow()
                run.log_text = f"[RESTAURATION]\n{log or 'OK'}"
                db.session.commit()
        except Exception as exc:
            app.logger.error(f"Restauration {archive_name} échouée : {exc}")
            if run:
                run.status = "error"
                run.finished_at = datetime.utcnow()
                run.log_text = f"[RESTAURATION]\n{exc}"
                db.session.commit()
|
|
|
|
|
+
|
|
|
|
|
+def _start_restore(archive_name):
|
|
|
|
|
+ """Crée un Run de restauration et lance le thread. Retourne (restore_run_id, archive_type)."""
|
|
|
info = _read_archive_info(archive_name)
|
|
info = _read_archive_info(archive_name)
|
|
|
archive_type = info.get("type", "")
|
|
archive_type = info.get("type", "")
|
|
|
|
|
|
|
|
- if request.method == "GET":
|
|
|
|
|
- return render_template("restore_confirm.html", archive_name=archive_name, info=info)
|
|
|
|
|
-
|
|
|
|
|
- # Trouver le job_id depuis le Run original pour pouvoir tracer la restauration
|
|
|
|
|
original_run = Run.query.filter_by(archive_name=archive_name).first()
|
|
original_run = Run.query.filter_by(archive_name=archive_name).first()
|
|
|
restore_run_id = None
|
|
restore_run_id = None
|
|
|
if original_run:
|
|
if original_run:
|
|
@@ -190,54 +240,22 @@ def archive_restore(archive_name):
|
|
|
db.session.commit()
|
|
db.session.commit()
|
|
|
restore_run_id = restore_run.id
|
|
restore_run_id = restore_run.id
|
|
|
|
|
|
|
|
- def _do_restore():
|
|
|
|
|
- with app.app_context():
|
|
|
|
|
- run = db.session.get(Run, restore_run_id) if restore_run_id else None
|
|
|
|
|
- try:
|
|
|
|
|
- backup_dir = app.config["YUNOHOST_BACKUP_DIR"]
|
|
|
|
|
- if archive_type == "custom_dir":
|
|
|
|
|
- from jobs.custom_dir import restore_custom_dir
|
|
|
|
|
- log = restore_custom_dir(archive_name, backup_dir)
|
|
|
|
|
- elif archive_type in ("mysql", "postgresql"):
|
|
|
|
|
- from jobs.db_dump import restore_db_dump
|
|
|
|
|
- log = restore_db_dump(archive_name, backup_dir)
|
|
|
|
|
- elif archive_type == "ynh_app":
|
|
|
|
|
- result = subprocess.run(
|
|
|
|
|
- ["sudo", "yunohost", "backup", "restore", archive_name,
|
|
|
|
|
- "--apps", "--force"],
|
|
|
|
|
- capture_output=True, text=True, timeout=3600,
|
|
|
|
|
- )
|
|
|
|
|
- log = (result.stdout + result.stderr).strip()
|
|
|
|
|
- if result.returncode != 0:
|
|
|
|
|
- raise RuntimeError(f"yunohost backup restore a échoué :\n{log}")
|
|
|
|
|
- elif archive_type == "ynh_system":
|
|
|
|
|
- result = subprocess.run(
|
|
|
|
|
- ["sudo", "yunohost", "backup", "restore", archive_name,
|
|
|
|
|
- "--system", "--force"],
|
|
|
|
|
- capture_output=True, text=True, timeout=3600,
|
|
|
|
|
- )
|
|
|
|
|
- log = (result.stdout + result.stderr).strip()
|
|
|
|
|
- if result.returncode != 0:
|
|
|
|
|
- raise RuntimeError(f"yunohost backup restore a échoué :\n{log}")
|
|
|
|
|
- else:
|
|
|
|
|
- raise NotImplementedError(
|
|
|
|
|
- f"Restauration non supportée pour le type '{archive_type}'."
|
|
|
|
|
- )
|
|
|
|
|
- if run:
|
|
|
|
|
- run.status = "success"
|
|
|
|
|
- run.finished_at = datetime.utcnow()
|
|
|
|
|
- run.log_text = f"[RESTAURATION]\n{log or 'OK'}"
|
|
|
|
|
- db.session.commit()
|
|
|
|
|
- except Exception as exc:
|
|
|
|
|
- app.logger.error(f"Restauration {archive_name} échouée : {exc}")
|
|
|
|
|
- if run:
|
|
|
|
|
- run.status = "error"
|
|
|
|
|
- run.finished_at = datetime.utcnow()
|
|
|
|
|
- run.log_text = f"[RESTAURATION]\n{exc}"
|
|
|
|
|
- db.session.commit()
|
|
|
|
|
|
|
+ threading.Thread(
|
|
|
|
|
+ target=_do_restore_job,
|
|
|
|
|
+ args=(archive_name, archive_type, restore_run_id),
|
|
|
|
|
+ daemon=True,
|
|
|
|
|
+ ).start()
|
|
|
|
|
+ return restore_run_id, archive_type
|
|
|
|
|
|
|
|
- import threading
|
|
|
|
|
- threading.Thread(target=_do_restore, daemon=True).start()
|
|
|
|
|
|
|
+
|
|
|
|
|
@app.route("/archives/<path:archive_name>/restore", methods=["GET", "POST"])
def archive_restore(archive_name):
    """Confirmation page (GET), then background restore launch (POST)."""
    archive_info = _read_archive_info(archive_name)

    if request.method == "POST":
        _start_restore(archive_name)
        flash(f"Restauration de « {archive_name} » démarrée en arrière-plan.", "success")
        return redirect(url_for("index"))

    # GET: ask the user to confirm before anything destructive happens.
    return render_template("restore_confirm.html", archive_name=archive_name, info=archive_info)
|
|
|
|
|
|
|
@@ -600,8 +618,244 @@ def api_archives():
|
|
|
@app.route("/api/v1/archives/<name>", methods=["DELETE"])
def api_archive_delete(name):
    """Delete an archive (tarball + metadata file) from the backup dir."""
    from jobs.utils import sudo_exists
    backup_dir = app.config["YUNOHOST_BACKUP_DIR"]
    # Backup files are not owned by this process, so both the existence
    # check and the removal go through sudo.
    for suffix in (".tar", ".info.json"):
        target = os.path.join(backup_dir, name + suffix)
        if sudo_exists(target):
            subprocess.run(["sudo", "rm", "-f", target], capture_output=True)
    return jsonify({"status": "deleted", "name": name})
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
@app.route("/api/v1/archives/<name>/info")
def api_archive_info(name):
    """Return the parsed metadata of an archive as JSON."""
    info = _read_archive_info(name)
    return jsonify(info)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
@app.route("/api/v1/archives/<name>/restore", methods=["POST"])
def api_archive_restore(name):
    """Kick off a background restore of *name*; returns the tracking run id."""
    run_id, _archive_type = _start_restore(name)
    return jsonify({"status": "started", "run_id": run_id})
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
@app.route("/api/v1/archives/<name>/restore/status")
def api_archive_restore_status(name):
    """Status of the most recent restore attempt for an archive.

    Restore runs are identified by their "[RESTAURATION" log prefix.
    NOTE(review): a restore whose log_text has not been written yet may
    not match this filter — confirm how the restore Run is initialised.
    """
    restore_runs = Run.query.filter(
        Run.archive_name == name,
        Run.log_text.like("[RESTAURATION%"),
    ).order_by(Run.started_at.desc())
    run = restore_runs.first()
    if run is None:
        return jsonify({"error": "Aucune restauration trouvée pour cette archive."}), 404

    payload = {
        "status": run.status,
        "log": run.log_text,
        "started_at": run.started_at.isoformat() if run.started_at else None,
        "finished_at": run.finished_at.isoformat() if run.finished_at else None,
    }
    return jsonify(payload)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
@app.route("/api/v1/summary")
def api_summary():
    """Machine-readable overview of every job and its latest run."""

    def _last_run_payload(job):
        # Most recent run only; None when the job has never executed.
        last = (Run.query.filter_by(job_id=job.id)
                .order_by(Run.started_at.desc()).first())
        if last is None:
            return None
        return {
            "id": last.id,
            "started_at": last.started_at.isoformat() if last.started_at else None,
            "status": last.status,
            "archive_name": last.archive_name,
            "size_bytes": last.size_bytes,
        }

    payload = [
        {
            "id": job.id,
            "name": job.name,
            "type": job.type,
            "cron_expr": job.cron_expr,
            "enabled": job.enabled,
            "last_run": _last_run_payload(job),
        }
        for job in Job.query.all()
    ]
    return jsonify({"instance": app.config.get("INSTANCE_NAME"), "jobs": payload})
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+# --- Upload chunked -----------------------------------------------------------
|
|
|
|
|
+
|
|
|
|
|
@app.route("/api/v1/archives/upload/start", methods=["POST"])
def api_upload_start():
    """Open a chunked-upload session and return its id.

    Expects a JSON body with at least "filename"; "total_size",
    "chunk_size", "chunks_total" and "checksum" (sha256 hex) are
    optional. Returns 400 on a missing filename or on non-numeric size
    fields (previously a bad value crashed int() with a 500).
    """
    data = request.get_json(force=True) or {}
    filename = data.get("filename", "")
    if not filename:
        return jsonify({"error": "filename requis"}), 400

    try:
        total_size = int(data.get("total_size", 0))
        chunk_size = int(data.get("chunk_size", 50 * 1024 * 1024))
        # Derive the chunk count when the client did not announce one;
        # guard against a zero/negative chunk_size before dividing.
        default_chunks = math.ceil(total_size / chunk_size) if chunk_size > 0 else 1
        chunks_total = int(data.get("chunks_total", default_chunks))
    except (TypeError, ValueError):
        # Untrusted client input: reject bad numbers explicitly.
        return jsonify({"error": "champs numériques invalides"}), 400
    checksum = data.get("checksum", "")

    upload_id = str(uuid.uuid4())
    upload = Upload(
        upload_id=upload_id,
        filename=filename,
        total_size=total_size,
        chunk_size=chunk_size,
        chunks_total=chunks_total,
        chunks_received=0,
        checksum=checksum,
        status="pending",
    )
    db.session.add(upload)
    db.session.commit()
    return jsonify({"upload_id": upload_id, "chunks_total": chunks_total})
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
@app.route("/api/v1/archives/upload/<upload_id>/chunk/<int:n>", methods=["POST"])
def api_upload_chunk(upload_id, n):
    """Store raw chunk *n* (request body) of an upload session on disk."""
    upload = db.get_or_404(Upload, upload_id)
    if upload.status == "complete":
        return jsonify({"error": "upload déjà terminé"}), 400

    # Chunks accumulate under DATA_DIR/uploads/<upload_id>/chunk_NNNNNN;
    # zero-padding keeps lexical sort equal to numeric order at finish time.
    session_dir = os.path.join(app.config["DATA_DIR"], "uploads", upload_id)
    os.makedirs(session_dir, exist_ok=True)
    with open(os.path.join(session_dir, f"chunk_{n:06d}"), "wb") as fh:
        fh.write(request.data)

    upload.status = "in_progress"
    upload.chunks_received = (upload.chunks_received or 0) + 1
    db.session.commit()
    return jsonify({"chunk": n, "received": upload.chunks_received})
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
@app.route("/api/v1/archives/upload/<upload_id>/finish", methods=["POST"])
def api_upload_finish(upload_id):
    """Assemble the received chunks, verify them, and move the archive.

    Returns 400 when no chunks arrived, when chunks are missing, or when
    the announced sha256 checksum does not match; 500 when the final
    copy fails. On success the temporary session directory is removed
    and the archive is copied into the backup directory via sudo rsync
    (the backup dir is not writable by this process).
    """
    upload = db.get_or_404(Upload, upload_id)
    tmp_dir = os.path.join(app.config["DATA_DIR"], "uploads", upload_id)
    backup_dir = app.config["YUNOHOST_BACKUP_DIR"]

    chunk_files = sorted(glob.glob(os.path.join(tmp_dir, "chunk_*")))
    if not chunk_files:
        return jsonify({"error": "aucun chunk reçu"}), 400
    # Fail early when the client did not send everything it announced;
    # previously an incomplete file was only caught by the checksum —
    # and not at all when no checksum was provided.
    if upload.chunks_total and len(chunk_files) != upload.chunks_total:
        return jsonify({
            "error": f"chunks manquants : {len(chunk_files)}/{upload.chunks_total}"
        }), 400

    # Concatenate chunks while hashing, so the checksum costs no extra pass.
    tmp_archive = os.path.join(tmp_dir, upload.filename)
    sha256 = hashlib.sha256()
    with open(tmp_archive, "wb") as out:
        for chunk_file in chunk_files:
            with open(chunk_file, "rb") as f:
                data = f.read()
            out.write(data)
            sha256.update(data)

    if upload.checksum and sha256.hexdigest() != upload.checksum:
        upload.status = "error"
        db.session.commit()
        shutil.rmtree(tmp_dir, ignore_errors=True)
        return jsonify({"error": "checksum invalide"}), 400

    dest_path = os.path.join(backup_dir, upload.filename)
    result = subprocess.run(
        ["sudo", "rsync", tmp_archive, dest_path],
        capture_output=True, text=True,
    )
    shutil.rmtree(tmp_dir, ignore_errors=True)

    if result.returncode != 0:
        upload.status = "error"
        db.session.commit()
        return jsonify({"error": result.stderr.strip()}), 500

    upload.status = "complete"
    db.session.commit()
    return jsonify({"status": "complete", "filename": upload.filename})
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
@app.route("/api/v1/archives/upload/<upload_id>", methods=["DELETE"])
def api_upload_cancel(upload_id):
    """Abort an upload session: drop its on-disk chunks and its DB row."""
    upload = db.get_or_404(Upload, upload_id)
    shutil.rmtree(
        os.path.join(app.config["DATA_DIR"], "uploads", upload_id),
        ignore_errors=True,
    )
    db.session.delete(upload)
    db.session.commit()
    return jsonify({"status": "cancelled"})
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
+# --- Instances distantes (3B) -------------------------------------------------
|
|
|
|
|
+
|
|
|
|
|
@app.route("/remote-instances")
def remote_instances_list():
    """List every registered remote instance, sorted by name."""
    all_instances = RemoteInstance.query.order_by(RemoteInstance.name).all()
    return render_template("remote_instances.html", instances=all_instances)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
@app.route("/remote-instances/new", methods=["GET", "POST"])
def remote_instance_new():
    """Creation form for a remote instance (GET) and its submit (POST)."""
    if request.method == "GET":
        return render_template("remote_instance_form.html", inst=None)
    return _save_remote_instance(None)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
@app.route("/remote-instances/<int:inst_id>/edit", methods=["GET", "POST"])
def remote_instance_edit(inst_id):
    """Edit form for an existing remote instance (GET) and its submit (POST)."""
    instance = db.get_or_404(RemoteInstance, inst_id)
    if request.method == "GET":
        return render_template("remote_instance_form.html", inst=instance)
    return _save_remote_instance(instance)
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
@app.route("/remote-instances/<int:inst_id>/delete", methods=["POST"])
def remote_instance_delete(inst_id):
    """Delete a remote instance and confirm via a flash message."""
    inst = db.get_or_404(RemoteInstance, inst_id)
    # Capture the name BEFORE delete+commit: commit expires the instance,
    # and attribute access on a deleted, expired row raises
    # ObjectDeletedError with SQLAlchemy's default expire_on_commit=True.
    name = inst.name
    db.session.delete(inst)
    db.session.commit()
    flash(f"Instance « {name} » supprimée.", "success")
    return redirect(url_for("remote_instances_list"))
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
@app.route("/remote-instances/<int:inst_id>/test", methods=["POST"])
def remote_instance_test(inst_id):
    """Ping a remote instance's health endpoint and record the outcome."""
    from federation.client import FederationClient
    inst = db.get_or_404(RemoteInstance, inst_id)
    try:
        health = FederationClient(inst).health()
        inst.status = "online"
        inst.last_seen = datetime.utcnow()
        db.session.commit()
        flash(f"Instance « {inst.name} » en ligne — {health.get('instance', '?')}.", "success")
    except Exception as exc:
        # Any failure (network, auth, bad payload) marks the instance down.
        inst.status = "error"
        db.session.commit()
        flash(f"Connexion échouée vers « {inst.name} » : {exc}", "error")
    return redirect(url_for("remote_instances_list"))
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
@app.route("/remote-instances/<int:inst_id>/sync", methods=["POST"])
def remote_instance_sync(inst_id):
    """Trigger a synchronisation with one remote instance."""
    from federation.client import sync_instance
    inst = db.get_or_404(RemoteInstance, inst_id)
    try:
        sync_instance(inst)
    except Exception as exc:
        flash(f"Synchronisation échouée pour « {inst.name} » : {exc}", "error")
    else:
        flash(f"Instance « {inst.name} » synchronisée.", "success")
    return redirect(url_for("remote_instances_list"))
|
|
|
|
|
+
|
|
|
|
|
+
|
|
|
|
|
def _save_remote_instance(inst):
    """Validate the posted form and create or update a RemoteInstance.

    Args:
        inst: existing RemoteInstance to update, or None to create one.

    Returns:
        A redirect to the instance list on success, or the re-rendered
        form with an error flash when a required field is missing.
    """
    form = request.form
    name = form.get("name", "").strip()
    url = form.get("url", "").strip().rstrip("/")
    api_key = form.get("api_key", "").strip()

    if not (name and url and api_key):
        flash("Nom, URL et token API sont requis.", "error")
        return render_template("remote_instance_form.html", inst=inst)

    if inst is None:
        inst = RemoteInstance()
        db.session.add(inst)

    inst.name, inst.url, inst.api_key = name, url, api_key
    db.session.commit()
    flash(f"Instance « {inst.name} » enregistrée.", "success")
    return redirect(url_for("remote_instances_list"))
|