#!/usr/bin/env bash
# Hourly PostgreSQL backup for Port Nimara CRM.
#
# Reads DATABASE_URL and BACKUP_S3_* from the environment. Dumps to a
# tmpfile, gzips, optionally GPG-encrypts to BACKUP_GPG_RECIPIENT, and
# uploads to s3://${BACKUP_S3_BUCKET}/pg/<host>/<date>/<hour>.dump.gz[.gpg].
#
# Designed to fail loud: any non-zero exit halts the script and propagates
# to the cron / CI runner so the operator sees the failure.

set -euo pipefail

: "${DATABASE_URL:?DATABASE_URL not set}"
: "${BACKUP_S3_BUCKET:?BACKUP_S3_BUCKET not set}"
: "${BACKUP_S3_ENDPOINT:?BACKUP_S3_ENDPOINT not set}"
: "${BACKUP_S3_ACCESS_KEY:?BACKUP_S3_ACCESS_KEY not set}"
: "${BACKUP_S3_SECRET_KEY:?BACKUP_S3_SECRET_KEY not set}"

HOST="${BACKUP_HOST_OVERRIDE:-$(hostname -s)}"
DATE_UTC="$(date -u +%Y-%m-%d)"
HOUR_UTC="$(date -u +%H)"

WORKDIR="$(mktemp -d)"
trap 'rm -rf "$WORKDIR"' EXIT

DUMP_FILE="$WORKDIR/${HOUR_UTC}.dump"
ARCHIVE_NAME="${HOUR_UTC}.dump.gz"

echo "[$(date -u +%FT%TZ)] Dumping $DATABASE_URL → $DUMP_FILE"
pg_dump --format=custom --compress=9 --no-owner --no-privileges \
  --file="$DUMP_FILE" "$DATABASE_URL"

# pg_dump's `custom` format is already compressed, but we wrap in gzip so
# the file looks the same regardless of the dump format on disk.
gzip -n "$DUMP_FILE"
GZ_FILE="${DUMP_FILE}.gz"

# Optional GPG layer. Only encrypt if the recipient is configured.
if [[ -n "${BACKUP_GPG_RECIPIENT:-}" ]]; then
  echo "[$(date -u +%FT%TZ)] Encrypting for $BACKUP_GPG_RECIPIENT"
  gpg --batch --yes --trust-model always \
    --recipient "$BACKUP_GPG_RECIPIENT" \
    --encrypt --output "${GZ_FILE}.gpg" "$GZ_FILE"
  rm "$GZ_FILE"
  GZ_FILE="${GZ_FILE}.gpg"
  ARCHIVE_NAME="${ARCHIVE_NAME}.gpg"
fi

# Configure mc client for the backup destination.
MC_ALIAS="bk-$$"
mc alias set "$MC_ALIAS" "$BACKUP_S3_ENDPOINT" \
  "$BACKUP_S3_ACCESS_KEY" "$BACKUP_S3_SECRET_KEY" \
  --api S3v4 >/dev/null

REMOTE_PATH="${MC_ALIAS}/${BACKUP_S3_BUCKET}/pg/${HOST}/${DATE_UTC}/${ARCHIVE_NAME}"
echo "[$(date -u +%FT%TZ)] Uploading → $REMOTE_PATH"
mc cp --quiet "$GZ_FILE" "$REMOTE_PATH"

# Tag with retention metadata so lifecycle rules can decide what to expire.
mc tag set "$REMOTE_PATH" "kind=hourly&host=${HOST}&date=${DATE_UTC}" >/dev/null

mc alias remove "$MC_ALIAS" >/dev/null

echo "[$(date -u +%FT%TZ)] OK ${ARCHIVE_NAME} ($(du -h "$GZ_FILE" | cut -f1))"
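
# ---------------------------------------------------------------------------
# Usage and restore sketch (comments only, not executed by this script).
# The crontab path, archive filename, and TARGET_DATABASE_URL below are
# illustrative assumptions, not part of this deployment.
#
# Example hourly schedule (assumed install path):
#   5 * * * * /opt/port-nimara/bin/pg_backup.sh >> /var/log/pg_backup.log 2>&1
#
# Minimal restore sketch, assuming the archive was downloaded locally as
# backup.dump.gz.gpg and TARGET_DATABASE_URL points at the target database:
#   gpg --decrypt backup.dump.gz.gpg | gunzip > backup.dump
#   pg_restore --no-owner --no-privileges --dbname="$TARGET_DATABASE_URL" backup.dump
# For unencrypted archives, skip the gpg step and gunzip directly.
# ---------------------------------------------------------------------------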