Complete implementation ready for Coolify: - Node.js 22 + Fastify + socket.io backend - PostgreSQL 16 + Redis 7 services - Docker Compose configuration - Deployment scripts and documentation Co-Authored-By: Paperclip <noreply@paperclip.ing>
71 lines
2.3 KiB
Bash
Executable file
71 lines
2.3 KiB
Bash
Executable file
#!/usr/bin/env bash
set -Eeuo pipefail

# AgentHub Postgres Backup Script
# Runs nightly at 03:00 UTC via ofelia scheduler.
# Retains RETENTION_DAYS days locally; on Sundays, uploads a GPG-encrypted
# copy to Scaleway Object Storage (when S3_ENDPOINT/S3_BUCKET are set).
#
# Auth note: pg_dump runs with --no-password, so credentials must come from
# PGPASSWORD or ~/.pgpass — it fails fast instead of hanging in cron.
# Optional env: BACKUP_DIR, RETENTION_DAYS, PGHOST, PGPORT, PGDATABASE,
#               PGUSER, S3_ENDPOINT, S3_BUCKET, GPG_RECIPIENT_KEY.

BACKUP_DIR="${BACKUP_DIR:-/backups}"
RETENTION_DAYS="${RETENTION_DAYS:-14}"
TIMESTAMP="$(date -u +%Y%m%d_%H%M%S)"
BACKUP_FILE="${BACKUP_DIR}/agenthub_${TIMESTAMP}.dump"

# Postgres connection from env
PGHOST="${PGHOST:-postgres}"
PGPORT="${PGPORT:-5432}"
PGDATABASE="${PGDATABASE:-agenthub}"
PGUSER="${PGUSER:-agenthub}"

# Uniform timestamped log line (same "[ISO8601] msg" format as before).
log() { printf '[%s] %s\n' "$(date -Iseconds)" "$*"; }

# On any error, remove partial artifacts so a truncated dump is never
# mistaken for a valid backup by the restore tooling or retention sweep.
BACKUP_VERIFIED=0
cleanup_partial() {
  rm -f -- "${BACKUP_FILE}.gpg"
  if [[ "${BACKUP_VERIFIED}" != "1" ]]; then
    rm -f -- "${BACKUP_FILE}"
  fi
}
trap cleanup_partial ERR

log "Starting backup to ${BACKUP_FILE}"

# Create backup directory if it doesn't exist
mkdir -p -- "${BACKUP_DIR}"

# Run pg_dump in custom format (-Fc) for efficient restore.
# --no-password: never prompt interactively (would hang the scheduler).
pg_dump -Fc \
  --no-password \
  -h "${PGHOST}" \
  -p "${PGPORT}" \
  -U "${PGUSER}" \
  -d "${PGDATABASE}" \
  -f "${BACKUP_FILE}"

# Verify backup file exists and has non-zero size
if [[ ! -s "${BACKUP_FILE}" ]]; then
  log "ERROR: Backup file is empty or missing" >&2
  exit 1
fi
BACKUP_VERIFIED=1

# stat flags differ: BSD/macOS wants -f%z, GNU coreutils wants -c%s
BACKUP_SIZE=$(stat -f%z "${BACKUP_FILE}" 2>/dev/null || stat -c%s "${BACKUP_FILE}")
log "Backup completed: ${BACKUP_FILE} (${BACKUP_SIZE} bytes)"

# Cleanup old backups (keep last RETENTION_DAYS days)
log "Cleaning up backups older than ${RETENTION_DAYS} days"
find "${BACKUP_DIR}" -name 'agenthub_*.dump' -type f -mtime "+${RETENTION_DAYS}" -delete

# Weekly encrypted upload to Scaleway (only on Sundays; %u gives 7 for Sunday)
if [[ "$(date -u +%u)" == "7" && -n "${S3_ENDPOINT:-}" && -n "${S3_BUCKET:-}" ]]; then
  log "Weekly backup: encrypting and uploading to S3"

  # GPG encrypt (requires GPG_RECIPIENT_KEY env var)
  if [[ -z "${GPG_RECIPIENT_KEY:-}" ]]; then
    log "WARNING: GPG_RECIPIENT_KEY not set, skipping encryption" >&2
  else
    gpg --batch --yes --trust-model always \
      --recipient "${GPG_RECIPIENT_KEY}" \
      --encrypt \
      --output "${BACKUP_FILE}.gpg" \
      "${BACKUP_FILE}"

    # Upload to S3 (Scaleway Object Storage)
    aws s3 cp \
      "${BACKUP_FILE}.gpg" \
      "s3://${S3_BUCKET}/weekly/$(basename "${BACKUP_FILE}.gpg")" \
      --endpoint-url "${S3_ENDPOINT}"

    # Remove local encrypted copy
    rm -f -- "${BACKUP_FILE}.gpg"
    log "Weekly backup uploaded and encrypted copy removed"
  fi
fi

log "Backup process completed"