Sync from main server - 2026-05-13 01:06:32

This commit is contained in:
root
2026-05-13 01:06:32 +02:00
parent 09bbe0403c
commit 6158b34613
8 changed files with 2159 additions and 129 deletions

View File

@@ -2,6 +2,10 @@
# =============================================
# backup-myapps.sh — Run on MAIN SERVER
# Backs up: Frappe, Nextcloud, Mautic, n8n, Odoo
# Storage tiers:
# 1. Local → /root/backups/
# 2. VM → SSH tunnel → /backups/main-server/
# 3. Cloud → Cloudflare R2 (S3-compatible)
# Usage: ./backup-myapps.sh
# =============================================
@@ -19,6 +23,20 @@ VM_PORT="2223"
VM_KEY="/root/.ssh/contabo-key"
VM_DEST="/backups/main-server/"
# ── Cloudflare R2 config ─────────────────────────────────────────────────────
# Set these via environment or export before running the script
# NOTE(review): the account ID default below is an identifier, not a secret,
# but prefer supplying it via env rather than committing it — confirm policy.
R2_ACCOUNT_ID="${R2_ACCOUNT_ID:-35e00c230cc8066252a2d9890b69aea2}"
# NOTE: the env var consulted is R2_BUCKET_NAME, the script-local name is R2_BUCKET.
R2_BUCKET="${R2_BUCKET_NAME:-navitrends-backups}"
# S3-compatible endpoint derived from the account ID.
R2_ENDPOINT="https://${R2_ACCOUNT_ID}.r2.cloudflarestorage.com"
# AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY must be exported in the env
# or set in /root/.r2-credentials (sourced below)
CREDENTIALS_FILE="/root/.r2-credentials"
# Source optional credentials file; silently skipped when absent.
if [ -f "$CREDENTIALS_FILE" ]; then
# shellcheck source=/dev/null
source "$CREDENTIALS_FILE"
fi
# Log file for backup status (used by boot-check script)
BACKUP_LOG_FILE="/root/backups/backup-status.log"
MAX_BACKUPS=10
@@ -34,6 +52,7 @@ log_status() {
echo "========================================="
echo "📦 Starting Backup: $BACKUP_NAME"
echo " Apps: Frappe, Nextcloud, Mautic, n8n, Odoo"
echo " Tiers: Local → VM → ☁ Cloudflare R2"
echo "========================================="
mkdir -p "$BACKUP_DIR"
@@ -153,12 +172,15 @@ Hostname: $(hostname)
Apps: Frappe, Nextcloud, Mautic, n8n, Odoo
Volumes: $VOLUME_COUNT volume(s) backed up
Docker info: $(docker --version)
Storage Tiers:
- Local: /root/backups/
- VM: ${VM_HOST}:${VM_PORT} → ${VM_DEST}
- Cloud: Cloudflare R2 → s3://${R2_BUCKET}/backups/
Volumes included:
$(ls volumes/*.tar.gz 2>/dev/null | xargs -I{} basename {} || echo "none")
EOF
# Write individual volume checksums for integrity verification later
echo "" >> backup-info.txt
echo "Volume SHA256 checksums:" >> backup-info.txt
for f in volumes/*.tar.gz; do
@@ -177,11 +199,9 @@ tar -czf "${BACKUP_NAME}.tar.gz" "${BACKUP_NAME}/"
# Record the human-readable size of the final compressed archive.
COMPRESSED_SIZE=$(du -h "${BACKUP_NAME}.tar.gz" | cut -f1)
# FIX(review): size and archive path were fused into one word
# ("$COMPRESSED_SIZE$BACKUP_ARCHIVE") in the synced copy — print them separately.
echo " ✅ Compressed size: $COMPRESSED_SIZE ($BACKUP_ARCHIVE)"
# Write a top-level SHA256 for the final archive (used by health-check)
sha256sum "${BACKUP_NAME}.tar.gz" > "${BACKUP_NAME}.tar.gz.sha256"
echo " ✅ Checksum written: ${BACKUP_NAME}.tar.gz.sha256"
# Remove staging directory now that archive is created.
# FIX(review): ":?" guard aborts instead of running `rm -rf` on an empty path.
rm -rf -- "${BACKUP_DIR:?}"
# --------------------------------------------------
@@ -205,10 +225,10 @@ else
fi
# --------------------------------------------------
# 9. Send to VM over SSH
# 9. Send to VM over SSH [TIER 2]
# --------------------------------------------------
echo ""
echo "📤 Sending backup to VM (${VM_HOST}:${VM_PORT})..."
echo "📤 [TIER 2] Sending backup to VM (${VM_HOST}:${VM_PORT})..."
scp -i "$VM_KEY" \
-P "$VM_PORT" \
-o StrictHostKeyChecking=no \
@@ -218,30 +238,80 @@ scp -i "$VM_KEY" \
if [ $? -eq 0 ]; then
echo " ✅ Backup sent to VM successfully!"
# Also send the checksum file
scp -i "$VM_KEY" -P "$VM_PORT" -o StrictHostKeyChecking=no \
"${BACKUP_NAME}.tar.gz.sha256" "${VM_USER}@${VM_HOST}:${VM_DEST}" 2>/dev/null || true
else
echo " ❌ Transfer failed. The compressed backup is still at:"
echo " $BACKUP_ARCHIVE"
echo " 💡 Retry manually:"
echo " scp -i $VM_KEY -P $VM_PORT $BACKUP_ARCHIVE ${VM_USER}@${VM_HOST}:${VM_DEST}"
echo "VM transfer failed. Local backup still at: $BACKUP_ARCHIVE"
fi
# --------------------------------------------------
# 10. Upload to Cloudflare R2 [TIER 3 — CLOUD]
# --------------------------------------------------
echo ""
echo "☁️ [TIER 3] Uploading to Cloudflare R2..."
echo " Bucket: ${R2_BUCKET}"
echo " Endpoint: ${R2_ENDPOINT}"
# Check if AWS CLI is available (used for S3-compatible uploads)
if command -v aws &>/dev/null && [ -n "${AWS_ACCESS_KEY_ID:-}" ] && [ -n "${AWS_SECRET_ACCESS_KEY:-}" ]; then
  # Upload main archive
  aws s3 cp "${BACKUP_NAME}.tar.gz" \
    "s3://${R2_BUCKET}/backups/${BACKUP_NAME}.tar.gz" \
    --endpoint-url "${R2_ENDPOINT}" \
    --no-progress \
    && echo " ✅ R2 upload complete: s3://${R2_BUCKET}/backups/${BACKUP_NAME}.tar.gz" \
    || echo " ❌ R2 upload failed"
  # Upload checksum (best-effort; the archive upload above is the one that matters)
  aws s3 cp "${BACKUP_NAME}.tar.gz.sha256" \
    "s3://${R2_BUCKET}/backups/${BACKUP_NAME}.tar.gz.sha256" \
    --endpoint-url "${R2_ENDPOINT}" \
    --no-progress 2>/dev/null || true
  R2_STATUS="uploaded via aws-cli"
# FIX(review): the aws-cli → boto3 boundary was corrupted in the synced copy
# ("--no-prth" cut mid-option and the heredoc opener was missing); the elif
# and `python3 <<PYEOF` lines below are reconstructed — verify against the
# deployed script on the main server.
elif command -v python3 &>/dev/null && python3 -c 'import boto3' 2>/dev/null; then
  # boto3 fallback: inline Python uploader against the R2 endpoint.
  # FIX(review): R2_STATUS was previously set to "uploaded via boto3" even when
  # the Python uploader exited non-zero (sys.exit(1)); record failures instead.
  if python3 <<PYEOF
import os, sys
os.environ.setdefault('R2_ACCOUNT_ID', '${R2_ACCOUNT_ID}')
os.environ.setdefault('R2_BUCKET_NAME', '${R2_BUCKET}')
try:
    import boto3
    client = boto3.client(
        's3',
        endpoint_url='${R2_ENDPOINT}',
        aws_access_key_id=os.environ.get('AWS_ACCESS_KEY_ID',''),
        aws_secret_access_key=os.environ.get('AWS_SECRET_ACCESS_KEY',''),
        region_name='auto'
    )
    archive = '/root/backups/${BACKUP_NAME}.tar.gz'
    key = 'backups/${BACKUP_NAME}.tar.gz'
    size = os.path.getsize(archive)
    print(f' Uploading {size/1024/1024:.1f} MB...')
    client.upload_file(archive, '${R2_BUCKET}', key)
    print(' ✅ R2 upload complete (boto3 fallback)')
except Exception as e:
    print(f' ❌ R2 upload failed: {e}')
    sys.exit(1)
PYEOF
  then
    R2_STATUS="uploaded via boto3"
  else
    R2_STATUS="failed (boto3)"
  fi
else
  echo " ⚠️ Skipping R2 upload: neither aws-cli nor python3/boto3 available"
  echo " 💡 Install: pip install boto3 OR apt install awscli"
  echo " 💡 Then set: export AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=..."
  R2_STATUS="skipped (no cli available)"
fi
# --------------------------------------------------
# 11. Write final status to log
# --------------------------------------------------
log_status "SUCCESS" "$BACKUP_NAME" "size=${COMPRESSED_SIZE} | r2=${R2_STATUS}"
# Final summary banner.
# FIX(review): the synced copy interleaved the pre-R2 summary lines (old
# "Name/Local/Remote" block) with the new 3-tier summary; keep only the
# added-side lines.
echo ""
echo "========================================="
echo "✅ BACKUP COMPLETE — 3-tier storage"
echo ""
echo " 📦 Name: $BACKUP_NAME"
echo " 💾 Local: $BACKUP_ARCHIVE ($COMPRESSED_SIZE)"
echo " 🖥️ VM: ${VM_HOST}:${VM_DEST}${BACKUP_NAME}.tar.gz"
echo " ☁️ R2: s3://${R2_BUCKET}/backups/${BACKUP_NAME}.tar.gz"
echo "========================================="
# ── AES-256 encryption (optional, call manually) ─────────────────────────────
encrypt_backup() {
  # Encrypt the final archive with AES-256-CBC (PBKDF2 key derivation).
  # SECURITY(review): the password was hardcoded on the openssl command line
  # (visible in `ps` and in the repo). Allow override via BACKUP_ENC_PASSWORD
  # and pass it through the environment; the old value stays as the default
  # for backward compatibility — rotate it and move it to a secrets file.
  BACKUP_ENC_PASSWORD="${BACKUP_ENC_PASSWORD:-Navitrends2024!}"
  export BACKUP_ENC_PASSWORD
  echo "🔐 Chiffrement AES-256..."
  # NOTE(review): the -in/-out arguments were lost in the synced copy (a diff
  # hunk header replaced them); reconstructed from the success message below —
  # verify against the deployed script.
  openssl enc -aes-256-cbc -pbkdf2 -pass env:BACKUP_ENC_PASSWORD \
    -in "$BACKUP_ARCHIVE" -out "${BACKUP_ARCHIVE}.enc"
  echo "✅ Archive chiffrée : ${BACKUP_ARCHIVE}.enc"
}
# ── Failure email notification ───────────────────────────────────────────────
notify_failure() {
  # Email a short failure notice; requires a configured `mail` command
  # (mailutils / bsd-mailx) on the host.
  # FIX(review): the synced copy contained both the old and the new recipient
  # line back-to-back (diff residue) — keep only the added-side recipient.
  echo "📧 Envoi notification échec..."
  echo "Backup FAILED: $BACKUP_NAME" | \
    mail -s "[Navitrends] BACKUP FAILED - $(date)" ameniboukottaya@gmail.com
}