Sync from main server - 2026-05-13 01:06:32

This commit is contained in:
root
2026-05-13 01:06:32 +02:00
parent 09bbe0403c
commit 6158b34613
8 changed files with 2159 additions and 129 deletions

View File

@@ -1,10 +1,12 @@
# app.py
from flask import Flask, render_template, request, redirect, url_for, session, jsonify
import os
import re
import subprocess
import threading
import uuid
import time
from datetime import datetime, timezone
from config import (
MAIN_SERVER_IP, RUNNING_ON_MAIN_SERVER,
@@ -20,17 +22,21 @@ from modules.backups import (
container_action, get_container_status,
audit_backup, delete_backup,
get_backup_log_entries, get_backup_script_path,
_ssh_main, _human_bytes, _run,
)
from modules.commands import run_command
from modules.users import (
get_all_users, get_user_containers, get_all_users_containers,
create_user, delete_user, get_user_disk_usage,
)
from modules.cloud_backup import (
r2_test_connection, r2_list_backups, r2_get_bucket_stats,
r2_delete_backup, r2_upload_async, get_upload_job,
r2_is_configured, R2_BUCKET_NAME,
)
app = Flask(__name__)
# NOTE(review): hard-coded secret key checked into source — should be loaded
# from the environment/config; rotating it invalidates all active sessions.
app.secret_key = 'navitrends-secret-key-2025'
# Disable browser caching of static files (0 = always revalidate).
# NOTE(review): an earlier comment here claimed this increases SSH timeouts —
# it does not; SEND_FILE_MAX_AGE_DEFAULT only controls static-file cache age.
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
# In-memory registry of restore jobs keyed by job id (process-local; lost on restart).
restore_jobs = {}
@@ -74,14 +80,12 @@ def _stream_backup(job_id, script_path):
# ─────────────────────────────────────────────
# DASHBOARD
# Loads instantly — all heavy data fetched async via JS after page renders
# PAGES
# ─────────────────────────────────────────────
@app.route('/')
@login_required
def dashboard():
# On the VM: skip slow SSH calls at page load — JS fetches them async via /api/dashboard
# On the main server: fetch everything normally (local calls, no SSH delay)
backups = get_local_backups()
vm_backups = get_vm_backups()
@@ -91,7 +95,7 @@ def dashboard():
system = get_system_info()
users = get_all_users()
else:
containers = [] # loaded async by JS via /api/dashboard
containers = []
running_count = 0
system = {}
users = []
@@ -141,7 +145,7 @@ def backups_page():
def restore_page():
prefill = {
'source': request.args.get('source', '').strip(),
'file': request.args.get('file', '').strip(),
'file': request.args.get('file', '').strip(),
}
return render_template(
'pages/restore.html',
@@ -158,8 +162,6 @@ def restore_page():
@app.route('/users')
@login_required
def users_page():
# On VM: skip slow SSH call — JS loads users async via /api/users
# On main server: fetch normally (local, fast)
users = get_all_users() if RUNNING_ON_MAIN_SERVER else []
return render_template(
'pages/users.html',
@@ -174,8 +176,6 @@ def users_page():
@app.route('/settings')
@login_required
def settings_page():
# On VM: skip slow SSH call — JS loads system info async via /api/system
# On main server: fetch normally (local, fast)
system = get_system_info() if RUNNING_ON_MAIN_SERVER else {}
return render_template(
'pages/settings.html',
@@ -188,9 +188,26 @@ def settings_page():
)
@app.route('/cloud')
@login_required
def cloud_page():
    """Render the Cloud Storage page with backup lists and R2 configuration state."""
    context = {
        'local_backups': get_local_backups(),
        'vm_backups': get_vm_backups(),
        'main_server': MAIN_SERVER_IP,
        'r2_bucket': R2_BUCKET_NAME,
        'r2_configured': r2_is_configured(),
        'active_page': 'cloud',
        'page_title': 'Cloud Storage',
        'page_subtitle': 'Cloudflare R2',
    }
    return render_template('pages/cloud.html', **context)
# ─────────────────────────────────────────────
# API — system + stats (always from main server)
# API — system + stats
# ─────────────────────────────────────────────
@app.route('/api/system')
@login_required
def api_system():
@@ -214,7 +231,6 @@ def api_containers():
@app.route('/api/containers/all')
@login_required
def api_containers_all():
"""Root containers + rootless-user containers, all from main server."""
root_ctrs = get_all_root_containers()
user_ctrs = get_rootless_user_containers_remote()
all_ctrs = root_ctrs + user_ctrs
@@ -225,7 +241,6 @@ def api_containers_all():
@app.route('/api/nav-summary')
@login_required
def api_nav_summary():
"""Lightweight counts for sidebar badges (one round trip)."""
root_ctrs = get_all_root_containers()
user_ctrs = get_rootless_user_containers_remote()
all_ctrs = root_ctrs + user_ctrs
@@ -236,16 +251,9 @@ def api_nav_summary():
})
# ─────────────────────────────────────────────
# API — dashboard summary (fast async load)
# ─────────────────────────────────────────────
@app.route('/api/dashboard')
@login_required
def api_dashboard():
"""
Single endpoint the dashboard JS calls after page render.
Returns system info + container summary + user count in one shot.
"""
system = get_system_info()
root_ctrs = get_all_root_containers()
user_ctrs = get_rootless_user_containers_remote()
@@ -254,18 +262,19 @@ def api_dashboard():
running = sum(1 for c in all_ctrs if 'Up' in c.get('status', ''))
return jsonify({
'system': system,
'containers': all_ctrs,
'running_count': running,
'user_count': len(users),
'local_backups': len(get_local_backups()),
'vm_backups': len(get_vm_backups()),
'system': system,
'containers': all_ctrs,
'running_count': running,
'user_count': len(users),
'local_backups': len(get_local_backups()),
'vm_backups': len(get_vm_backups()),
})
# ─────────────────────────────────────────────
# API — container actions
# ─────────────────────────────────────────────
@app.route('/api/container/action', methods=['POST'])
@login_required
def api_container_action():
@@ -291,13 +300,13 @@ def api_container_action():
@app.route('/api/container/status/<name>')
@login_required
def api_container_status(name):
status_info = get_container_status(name)
return jsonify(status_info)
return jsonify(get_container_status(name))
# ─────────────────────────────────────────────
# API — backups
# ─────────────────────────────────────────────
@app.route('/api/backups')
@login_required
def api_backups():
@@ -307,7 +316,7 @@ def api_backups():
@app.route('/api/backups/log')
@login_required
def api_backup_log():
limit = int(request.args.get('limit', 20))
limit = int(request.args.get('limit', 20))
entries = get_backup_log_entries(limit)
return jsonify({'entries': entries})
@@ -375,9 +384,130 @@ def api_backup_run_status(job_id):
})
def _fill_archive_times(result, size_bytes, mtime):
    """Populate size/created/age fields on *result*; *mtime* must be a tz-aware UTC datetime."""
    result['size_bytes'] = size_bytes
    result['size_human'] = _human_bytes(size_bytes)
    result['created_at'] = mtime.strftime('%Y-%m-%d %H:%M:%S UTC')
    result['age_days'] = (datetime.now(tz=timezone.utc) - mtime).days


def _stat_archive_local(result, archive_path, sha_path):
    """Stat an archive on this machine and read its .sha256 sidecar (best-effort)."""
    if os.path.exists(archive_path):
        st = os.stat(archive_path)
        _fill_archive_times(result, st.st_size,
                            datetime.fromtimestamp(st.st_mtime, tz=timezone.utc))
        if os.path.exists(sha_path):
            try:
                with open(sha_path) as f:
                    result['sha256'] = f.read().split()[0].strip()
            except Exception:
                pass  # unreadable sidecar — leave sha256 as None


def _parse_remote_stat(result, stat_out):
    """Parse remote `stat -c '%s %Y'` output ("<size> <epoch>") into *result*.

    Empty/malformed output is ignored, matching the previous inline behavior.
    """
    if stat_out:
        parts = stat_out.split()
        if len(parts) >= 2:
            _fill_archive_times(result, int(parts[0]),
                                datetime.fromtimestamp(int(parts[1]), tz=timezone.utc))


@app.route('/api/backups/details', methods=['POST'])
@login_required
def api_backup_details():
    """
    Fast metadata for the Details popup — no gzip scan, just stat + sha sidecar.
    Body: { backup_file: "myapps-backup-YYYYMMDD_HHMMSS.tar.gz", source: "local"|"vm" }
    Returns size, creation time, age, sha256 (when a sidecar exists) and an
    R2-presence flag; missing files simply leave the fields as None.
    """
    data = request.get_json() or {}
    # `or ''` tolerates an explicit JSON null, which .get(default) does not cover.
    backup_file = (data.get('backup_file') or '').strip()
    source = (data.get('source') or 'local').strip()
    # Strict filename whitelist — also prevents shell injection in the SSH commands below.
    if not re.match(r'^myapps-backup-\d{8}_\d{6}\.tar\.gz$', backup_file):
        return jsonify({'error': 'Invalid filename'}), 400
    if source == 'local':
        archive_path = f'/root/backups/{backup_file}'
    else:
        archive_path = f'/backups/main-server/{backup_file}'
    sha_path = archive_path + '.sha256'
    result = {
        'backup_file': backup_file,
        'source': source,
        'path': archive_path,
        'size_bytes': None,
        'size_human': None,
        'created_at': None,
        'age_days': None,
        'sha256': None,
        'on_cloud': False,
    }
    # ── size + mtime + sha ───────────────────────────────────────
    if source == 'local':
        if RUNNING_ON_MAIN_SERVER:
            # Archive lives on this machine — direct stat.
            _stat_archive_local(result, archive_path, sha_path)
        else:
            # We are the VM — ask the main server over SSH.
            stat_out, _ = _ssh_main(f"stat -c '%s %Y' {archive_path} 2>/dev/null")
            _parse_remote_stat(result, stat_out)
            sha_out, _ = _ssh_main(f"cat {sha_path} 2>/dev/null | awk '{{print $1}}'")
            if sha_out.strip():
                result['sha256'] = sha_out.strip()
    elif source == 'vm':
        if not RUNNING_ON_MAIN_SERVER:
            # We ARE the VM — direct stat.
            _stat_archive_local(result, archive_path, sha_path)
        else:
            # SSH from the main server to the VM.
            ssh_prefix = (
                f"ssh -i {VM_KEY} -p {VM_PORT} "
                f"-o StrictHostKeyChecking=no -o ConnectTimeout=10 -o BatchMode=yes "
                f"{VM_USER}@{VM_HOST}"
            )
            stat_out, _ = _run(
                f"{ssh_prefix} \"stat -c '%s %Y' {archive_path} 2>/dev/null\"",
                timeout=20
            )
            _parse_remote_stat(result, stat_out)
            sha_out, _ = _run(
                f"{ssh_prefix} \"cat {sha_path} 2>/dev/null | awk '{{print $1}}'\"",
                timeout=15
            )
            if sha_out.strip():
                result['sha256'] = sha_out.strip()
    # ── R2 presence check (best-effort) ──────────────────────────
    try:
        r2_names = {b.get('name') for b in r2_list_backups()}
        result['on_cloud'] = backup_file in r2_names
    except Exception:
        pass  # cloud lookup is optional — never fail the details call
    return jsonify(result)
# ─────────────────────────────────────────────
# API — users
# ─────────────────────────────────────────────
@app.route('/api/users')
@login_required
def api_users():
@@ -399,10 +529,10 @@ def api_user_disk(username):
@app.route('/api/users/create', methods=['POST'])
@login_required
def api_create_user():
data = request.get_json() or {}
username = data.get('username', '').strip()
password = data.get('password', '').strip()
setup_docker = data.get('setup_docker', True)
data = request.get_json() or {}
username = data.get('username', '').strip()
password = data.get('password', '').strip()
setup_docker = data.get('setup_docker', True)
disk_quota_mb = data.get('disk_quota_mb')
if not username:
@@ -431,9 +561,113 @@ def api_delete_user():
return jsonify({'success': success, 'message': message})
# ─────────────────────────────────────────────
# API — Cloudflare R2 cloud storage
# ─────────────────────────────────────────────
@app.route('/api/cloud/r2/test')
@login_required
def api_r2_test():
    """Run the R2 connectivity check and return its result as JSON."""
    outcome = r2_test_connection()
    return jsonify(outcome)
@app.route('/api/cloud/r2/stats')
@login_required
def api_r2_stats():
    """Return aggregate statistics for the R2 bucket as JSON."""
    stats = r2_get_bucket_stats()
    return jsonify(stats)
@app.route('/api/cloud/r2/backups')
@login_required
def api_r2_list():
    """List the backup objects currently stored in the R2 bucket."""
    payload = {'backups': r2_list_backups()}
    return jsonify(payload)
@app.route('/api/cloud/r2/delete', methods=['POST'])
@login_required
def api_r2_delete():
    """
    Delete one backup object from R2.
    Body: { key: "<object key>" } → { success, message } (400 when key missing).
    """
    data = request.get_json() or {}
    # `or ''` guards an explicit JSON null, which would crash .strip() with a 500.
    key = (data.get('key') or '').strip()
    if not key:
        return jsonify({'success': False, 'message': 'key required'}), 400
    success, message = r2_delete_backup(key)
    return jsonify({'success': success, 'message': message})
@app.route('/api/cloud/r2/audit', methods=['POST'])
@login_required
def api_r2_audit():
    """
    Audit a single R2 backup object.
    Body: { key: "<object key>" } → audit report JSON (400 when key missing).
    """
    # Imported lazily, matching how the rest of the file pulls optional R2 helpers.
    from modules.cloud_backup import r2_audit_backup
    data = request.get_json() or {}
    # `or ''` guards an explicit JSON null, which would crash .strip() with a 500.
    key = (data.get('key') or '').strip()
    if not key:
        return jsonify({'error': 'key required'}), 400
    return jsonify(r2_audit_backup(key))
@app.route('/api/cloud/r2/upload', methods=['POST'])
@login_required
def api_r2_upload():
    """
    Start an async upload of a backup archive to R2.
    Body: { backup_file: "myapps-backup-YYYYMMDD_HHMMSS.tar.gz", source: "local"|"vm" }
    → { success, job_id }. When the archive is not on this machine it is first
    pulled over scp from the main server or the VM.
    """
    data = request.get_json() or {}
    # `or ''` tolerates an explicit JSON null, which .get(default) does not cover.
    backup_file = (data.get('backup_file') or '').strip()
    source = (data.get('source') or 'local').strip()
    if not backup_file:
        return jsonify({'success': False, 'message': 'backup_file required'}), 400
    # Whitelist the filename (same pattern as /api/backups/details): it is
    # interpolated into shell scp commands and filesystem paths below, so an
    # unvalidated value would allow shell injection / path traversal.
    if not re.match(r'^myapps-backup-\d{8}_\d{6}\.tar\.gz$', backup_file):
        return jsonify({'success': False, 'message': 'Invalid filename'}), 400
    if source == 'local':
        if RUNNING_ON_MAIN_SERVER:
            local_path = f"/root/backups/{backup_file}"
        else:
            # Running on the VM — pull the archive from the main server first.
            local_path = f"/tmp/{backup_file}"
            if not os.path.exists(local_path):
                pull_cmd = (
                    f"scp -i {MAIN_SERVER_KEY} -P {MAIN_SERVER_PORT} "
                    f"-o StrictHostKeyChecking=no -o ConnectTimeout=15 "
                    f"{MAIN_SERVER_USER}@{MAIN_SERVER_IP}:/root/backups/{backup_file} "
                    f"{local_path}"
                )
                res = subprocess.run(pull_cmd, shell=True, capture_output=True, text=True)
                if res.returncode != 0:
                    return jsonify({'success': False, 'message': f'Failed to pull from main server: {res.stderr}'}), 500
    else:
        if RUNNING_ON_MAIN_SERVER:
            # VM-side backup requested while on the main server — pull it over.
            local_path = f"/tmp/{backup_file}"
            if not os.path.exists(local_path):
                pull_cmd = (
                    f"scp -i {VM_KEY} -P {VM_PORT} "
                    f"-o StrictHostKeyChecking=no -o ConnectTimeout=15 "
                    f"{VM_USER}@{VM_HOST}:/backups/main-server/{backup_file} "
                    f"{local_path}"
                )
                res = subprocess.run(pull_cmd, shell=True, capture_output=True, text=True)
                if res.returncode != 0:
                    return jsonify({'success': False, 'message': f'Failed to pull from VM: {res.stderr}'}), 500
        else:
            local_path = f"/backups/main-server/{backup_file}"
    # Final existence check for every branch — avoids spawning an upload
    # thread for a file that is not actually present.
    if not os.path.exists(local_path):
        return jsonify({'success': False, 'message': f'File not found: {local_path}'}), 400
    job_id = str(uuid.uuid4())
    t = threading.Thread(target=r2_upload_async, args=(local_path, job_id), daemon=True)
    t.start()
    return jsonify({'success': True, 'job_id': job_id})
@app.route('/api/cloud/r2/upload/status/<job_id>')
@login_required
def api_r2_upload_status(job_id):
    """Return progress/state for an R2 upload job, or 404 when the id is unknown."""
    job_info = get_upload_job(job_id)
    if not job_info:
        return jsonify({'error': 'Job not found'}), 404
    return jsonify(job_info)
# ─────────────────────────────────────────────
# RESTORE
# ─────────────────────────────────────────────
@app.route('/restore/start', methods=['POST'])
@login_required
def restore_start():
@@ -443,6 +677,7 @@ def restore_start():
backup_source = data.get('backup_source', 'local')
backup_file = data.get('backup_file', '').strip()
cloud_key = data.get('cloud_key', '').strip()
target = data.get('target', 'local')
remote_ip = data.get('remote_ip', '').strip()
remote_port = str(data.get('remote_port', '22')).strip() or '22'
@@ -454,7 +689,21 @@ def restore_start():
if not backup_file:
return jsonify({'error': 'No backup file specified'}), 400
if backup_source == 'local':
# ── Resolve backup path by source ────────────────────────────
if backup_source == 'cloud':
backup_path = f"/tmp/{backup_file}"
if not os.path.exists(backup_path):
from modules.cloud_backup import _get_r2_client, _get_r2_config
try:
cfg = _get_r2_config()
bucket = cfg["bucket_name"]
key = cloud_key or f"backups/{backup_file}"
client = _get_r2_client()
client.download_file(bucket, key, backup_path)
except Exception as e:
return jsonify({'error': f'Failed to download from R2: {e}'}), 500
elif backup_source == 'local':
if RUNNING_ON_MAIN_SERVER:
backup_path = f"/root/backups/{backup_file}"
if not os.path.exists(backup_path):
@@ -471,7 +720,7 @@ def restore_start():
res = subprocess.run(pull_cmd, shell=True, capture_output=True, text=True)
if res.returncode != 0:
return jsonify({'error': f'Failed to pull from main server: {res.stderr}'}), 500
else:
else: # vm
if RUNNING_ON_MAIN_SERVER:
backup_path = f"/tmp/{backup_file}"
if not os.path.exists(backup_path):
@@ -496,13 +745,13 @@ def restore_start():
return jsonify({'error': f'restore-myapps.sh not found at {restore_script_local}'}), 500
if target == 'local':
hostname = os.uname().nodename
hostname = os.uname().nodename
session_dir = f"/tmp/restore-session-{uuid.uuid4().hex[:8]}"
cmd = (
f"set -e && "
f"echo '🖥️ Restoring on this server ({hostname})...' && "
f"echo 'Restoring on this server ({hostname})...' && "
f"mkdir -p {session_dir} && "
f"echo '📂 Extracting backup...' && "
f"echo 'Extracting backup...' && "
f"tar -xzf {backup_path} -C {session_dir} --strip-components=1 && "
f"cp {restore_script_local} {session_dir}/restore-myapps.sh && "
f"chmod +x {session_dir}/restore-myapps.sh && "
@@ -527,14 +776,14 @@ def restore_start():
remote_dest = f"/backups/restore-session-{uuid.uuid4().hex[:8]}"
cmd = (
f"echo '🔗 Connecting to {remote_user}@{remote_ip}:{remote_port}...' && "
f"echo 'Connecting to {remote_user}@{remote_ip}:{remote_port}...' && "
f"{ssh_prefix} {remote_user}@{remote_ip} 'mkdir -p {remote_dest}' && "
f"echo 'Connected.' && "
f"echo '📤 Copying backup archive...' && "
f"echo 'Connected.' && "
f"echo 'Copying backup archive...' && "
f"{scp_prefix} {backup_path} {remote_user}@{remote_ip}:{remote_dest}/{backup_file} && "
f"echo '📤 Copying restore script...' && "
f"echo 'Copying restore script...' && "
f"{scp_prefix} {restore_script_local} {remote_user}@{remote_ip}:{remote_dest}/restore-myapps.sh && "
f"echo '🚀 Running restore on {remote_ip}:{remote_port}...' && "
f"echo 'Running restore on {remote_ip}:{remote_port}...' && "
f"{ssh_prefix} {remote_user}@{remote_ip} "
f"'set -e && cd {remote_dest} && "
f"tar -xzf {backup_file} --strip-components=1 && "
@@ -566,6 +815,7 @@ def restore_status_poll(job_id):
# ─────────────────────────────────────────────
# SERVER STATUS
# ─────────────────────────────────────────────
@app.route('/server/status')
@login_required
def server_status():
@@ -578,6 +828,7 @@ def server_status():
# ─────────────────────────────────────────────
# AUTH
# ─────────────────────────────────────────────
@app.route('/login', methods=['GET', 'POST'])
def login():
error = ''