Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 14 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -120,3 +120,17 @@ DOCKER_PROXY_ENABLED=false
CN_MODE=false
CN_APT_MIRROR=https://mirrors.aliyun.com/ubuntu
CN_DOCKER_MIRROR=https://docker.m.daocloud.io

# -----------------------------------------------------------------------------
# NOTIFICATIONS (ntfy + Gotify)
# -----------------------------------------------------------------------------
# ntfy access token (optional, for protected topics)
# Generate: docker exec -it ntfy ntfy token add --expires=never admin
NTFY_TOKEN=

# Gotify application token (optional, for fallback notifications)
# Create in Gotify UI: Apps -> Create Application
GOTIFY_TOKEN=

# Notification fallback settings
# NOTE(review): presumably consumed by scripts/notify.sh — when true, a
# notification that fails to reach ntfy is retried via Gotify. Confirm the
# consumer and exact semantics against the notify script.
FALLBACK_ENABLED=true
52 changes: 44 additions & 8 deletions config/alertmanager/alertmanager.yml
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
# =============================================================================
# Alertmanager Configuration
# With ntfy integration for push notifications
# =============================================================================

global:
resolve_timeout: 5m
smtp_require_tls: false
Expand All @@ -7,21 +12,52 @@ route:
group_wait: 30s
group_interval: 5m
repeat_interval: 12h
receiver: default
receiver: ntfy
routes:
# Critical alerts - immediate notification
- match:
severity: critical
receiver: default
receiver: ntfy-critical
continue: true

# Warning alerts - batched notification
- match:
severity: warning
receiver: ntfy
continue: true

receivers:
  # ---------------------------------------------------------------------------
  # ntfy - Default notification receiver
  # ---------------------------------------------------------------------------
  # NOTE(review): ntfy receives the raw Alertmanager JSON webhook payload;
  # messages will render as unformatted JSON unless a templating bridge
  # (e.g. ntfy-alertmanager) sits in between — confirm this is acceptable.
  - name: ntfy
    webhook_configs:
      - url: 'http://ntfy:80/homelab-alerts'
        send_resolved: true
        http_config:
          # NOTE(review): a plain `headers:` map is not a valid Alertmanager
          # http_config field. Custom headers use `http_headers` with
          # list-valued entries and require Alertmanager >= 0.27 — confirm
          # the deployed version before relying on this.
          http_headers:
            Title:
              values: ['Homelab Alert']
            Priority:
              values: ['default']
            Tags:
              values: ['warning,alert']

  # ---------------------------------------------------------------------------
  # ntfy-critical - High priority alerts
  # ---------------------------------------------------------------------------
  - name: ntfy-critical
    webhook_configs:
      - url: 'http://ntfy:80/homelab-alerts-critical'
        send_resolved: true
        http_config:
          http_headers:
            Title:
              values: ['🔴 CRITICAL: Homelab Alert']
            Priority:
              values: ['high']
            Tags:
              values: ['critical,alert']

  # ---------------------------------------------------------------------------
  # Default (fallback)
  # ---------------------------------------------------------------------------
  - name: default
    # Placeholder for other notification methods

inhibit_rules:
- source_match:
Expand Down
31 changes: 31 additions & 0 deletions config/ntfy/server.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# =============================================================================
# ntfy Server Configuration
# https://ntfy.sh/docs/config/
# =============================================================================

# Base URL for the ntfy server
# NOTE(review): ntfy does not expand ${VAR} placeholders inside server.yml.
# Either template this file at deploy time (envsubst) or set the
# NTFY_BASE_URL environment variable on the container instead — confirm
# how this file is rendered before shipping.
base-url: https://ntfy.${DOMAIN}

# Listen address (inside container)
listen-http: ":80"

# Behind reverse proxy
# Makes ntfy use X-Forwarded-For for rate limiting instead of the proxy IP.
behind-proxy: true

# Authentication settings
# deny-all: every topic requires an authenticated user/token (see NTFY_TOKEN).
auth-default-access: deny-all
auth-file: /var/lib/ntfy/user.db

# Cache settings
# Messages are retained for 12h so late subscribers can catch up.
cache-file: /var/cache/ntfy/cache.db
cache-duration: "12h"

# Attachment settings
attachment-cache-dir: /var/cache/ntfy/attachments

# Rate limiting
global-topic-limit: 10000
visitor-subscription-limit: 30

# Logging
# NOTE(review): ntfy docs use lowercase level names ("info"); confirm
# uppercase INFO is accepted by the deployed ntfy version.
log-level: INFO
240 changes: 201 additions & 39 deletions scripts/backup-databases.sh
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,55 +1,217 @@
#!/usr/bin/env bash
# =============================================================================
# backup-databases.sh - Database backup script
# Usage: backup-databases.sh [--target local|s3] [--keep DAYS]
#
# Backs up PostgreSQL, Redis, and MariaDB databases
# =============================================================================

set -euo pipefail

# ANSI colors for terminal output (RED kept for future error paths).
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Configuration — all overridable via the environment.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BACKUP_DIR="${BACKUP_DIR:-/var/backups/homelab}"
KEEP_DAYS="${KEEP_DAYS:-7}"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
DATE=$(date +%Y-%m-%d)  # NOTE(review): currently unused in this script — confirm before removing

# Load database credentials (POSTGRES_ROOT_USER, REDIS_PASSWORD,
# MARIADB_ROOT_PASSWORD, ...) from the databases stack's .env, if present.
# set -a exports everything sourced so child processes (docker exec) see it.
STACK_DIR="$(dirname "$SCRIPT_DIR")/stacks/databases"
if [ -f "$STACK_DIR/.env" ]; then
  set -a
  # shellcheck disable=SC1091
  source "$STACK_DIR/.env"
  set +a
fi
# Parse command-line arguments. Unknown arguments are silently ignored
# (preserved behavior); only --target and --keep are recognized.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --target)
      BACKUP_TARGET="$2"
      shift 2
      ;;
    --keep)
      KEEP_DAYS="$2"
      shift 2
      ;;
    *)
      shift
      ;;
  esac
done

echo -e "${GREEN}=== Database Backup ===${NC}"
echo "Timestamp: $TIMESTAMP"
echo "Backup directory: $BACKUP_DIR"
echo "Keep days: $KEEP_DAYS"
echo ""

# Create backup directory
mkdir -p "$BACKUP_DIR"

# -----------------------------------------------------------------------------
# PostgreSQL Backup
# -----------------------------------------------------------------------------
# Dumps all databases with pg_dumpall into a gzipped SQL file.
# Globals: BACKUP_DIR, TIMESTAMP, POSTGRES_ROOT_USER (optional), GREEN/YELLOW/NC
# Returns: 0 on success or when the container is not running.
backup_postgres() {
  echo -e "${GREEN}Backing up PostgreSQL...${NC}"

  # -x: exact name match — a plain substring grep would also skip-proof
  # against names like "homelab-postgres-exporter".
  if ! docker ps --format '{{.Names}}' | grep -qx "homelab-postgres"; then
    echo -e "${YELLOW}  PostgreSQL container not running, skipping${NC}"
    return 0
  fi

  local backup_file="${BACKUP_DIR}/postgres_${TIMESTAMP}.sql"

  # Under `set -euo pipefail` a failed dump aborts this function and is
  # tallied as an error by main().
  docker exec homelab-postgres pg_dumpall -U "${POSTGRES_ROOT_USER:-postgres}" > "$backup_file"

  # Compress
  gzip "$backup_file"

  # Declaration split from assignment so a du/cut failure isn't masked.
  local size
  size=$(du -h "${backup_file}.gz" | cut -f1)
  echo -e "  ${GREEN}✓ PostgreSQL backup: ${backup_file}.gz ($size)${NC}"
}

# -----------------------------------------------------------------------------
# Redis Backup
# -----------------------------------------------------------------------------
# Triggers a Redis BGSAVE, then copies and gzips the resulting dump.rdb.
# Globals: BACKUP_DIR, TIMESTAMP, REDIS_PASSWORD, GREEN/YELLOW/NC
# Returns: 0 on success or when the container is not running.
backup_redis() {
  echo -e "${GREEN}Backing up Redis...${NC}"

  # -x: exact container-name match (substring grep would match e.g.
  # "homelab-redis-exporter").
  if ! docker ps --format '{{.Names}}' | grep -qx "homelab-redis"; then
    echo -e "${YELLOW}  Redis container not running, skipping${NC}"
    return 0
  fi

  # --no-auth-warning silences only the password-on-CLI warning; a blanket
  # 2>/dev/null would also hide real failures.
  docker exec homelab-redis redis-cli -a "${REDIS_PASSWORD}" --no-auth-warning BGSAVE

  # NOTE(review): BGSAVE is asynchronous; a fixed 2s sleep is a race on large
  # datasets. Consider polling LASTSAVE until it advances.
  sleep 2

  local backup_file="${BACKUP_DIR}/redis_${TIMESTAMP}.rdb"

  # Copy RDB file out of the container, then compress.
  docker cp homelab-redis:/data/dump.rdb "$backup_file"
  gzip "$backup_file"

  local size
  size=$(du -h "${backup_file}.gz" | cut -f1)
  echo -e "  ${GREEN}✓ Redis backup: ${backup_file}.gz ($size)${NC}"
}

# -----------------------------------------------------------------------------
# MariaDB Backup
# -----------------------------------------------------------------------------
# Dumps all MariaDB databases into a gzipped SQL file.
# Globals: BACKUP_DIR, TIMESTAMP, MARIADB_ROOT_PASSWORD, GREEN/YELLOW/NC
# Returns: 0 on success or when the container is not running.
backup_mariadb() {
  echo -e "${GREEN}Backing up MariaDB...${NC}"

  # -x: exact container-name match.
  if ! docker ps --format '{{.Names}}' | grep -qx "homelab-mariadb"; then
    echo -e "${YELLOW}  MariaDB container not running, skipping${NC}"
    return 0
  fi

  local backup_file="${BACKUP_DIR}/mariadb_${TIMESTAMP}.sql"

  # mariadb-dump is the non-deprecated name in current MariaDB images
  # (mysqldump is a compatibility symlink). No stderr suppression: hiding
  # dump errors would silently produce truncated backups.
  # NOTE(review): the password is visible in `ps` while the dump runs;
  # prefer MYSQL_PWD or --defaults-extra-file on shared hosts.
  docker exec homelab-mariadb mariadb-dump --all-databases -u root -p"${MARIADB_ROOT_PASSWORD}" > "$backup_file"

  # Compress
  gzip "$backup_file"

  local size
  size=$(du -h "${backup_file}.gz" | cut -f1)
  echo -e "  ${GREEN}✓ MariaDB backup: ${backup_file}.gz ($size)${NC}"
}

# -----------------------------------------------------------------------------
# Create combined archive
# -----------------------------------------------------------------------------
# Bundles this run's per-database dumps into one tar.gz, then removes the
# individual files so only the combined archive remains.
# Globals: BACKUP_DIR, TIMESTAMP, GREEN/NC
create_archive() {
  echo -e "${GREEN}Creating combined archive...${NC}"

  local archive="${BACKUP_DIR}/databases_${TIMESTAMP}.tar.gz"

  # NUL-delimited so odd filenames survive, and the archive itself is
  # excluded: find runs concurrently with tar in the pipeline, so without
  # `! -name "databases_*"` it could feed tar its own output file.
  find "$BACKUP_DIR" -name "*_${TIMESTAMP}*" ! -name "databases_*" -type f -print0 \
    | tar --null -czf "$archive" -T -

  local size
  size=$(du -h "$archive" | cut -f1)
  echo -e "  ${GREEN}✓ Archive: $archive ($size)${NC}"

  # Remove individual files (keep only archive)
  find "$BACKUP_DIR" -name "*_${TIMESTAMP}*.gz" ! -name "databases_*" -delete
}

# -----------------------------------------------------------------------------
# Cleanup old backups
# -----------------------------------------------------------------------------
# Deletes combined archives older than KEEP_DAYS days and reports the count.
# Globals: BACKUP_DIR, KEEP_DAYS, GREEN/NC
cleanup_old_backups() {
  echo -e "${GREEN}Cleaning up old backups...${NC}"

  # Declaration split from assignment so a find failure isn't masked by
  # `local`; KEEP_DAYS quoted like every other expansion. -delete runs
  # before -print so only successfully removed files are counted.
  local count
  count=$(find "$BACKUP_DIR" -name "databases_*.tar.gz" -mtime +"${KEEP_DAYS}" -delete -print | wc -l)
  echo -e "  ${GREEN}✓ Removed $count old backup(s)${NC}"
}

# -----------------------------------------------------------------------------
# Optional: Upload to S3/MinIO
# -----------------------------------------------------------------------------
# No-op unless `--target s3` was requested AND S3_BUCKET is configured.
# Prefers aws-cli, falls back to MinIO's mc, warns if neither is installed.
# Globals: BACKUP_TARGET, S3_BUCKET, S3_ALIAS, BACKUP_DIR, TIMESTAMP
upload_to_s3() {
  # Guard clause instead of wrapping the whole body in an if-block.
  if [ "${BACKUP_TARGET:-}" != "s3" ] || [ -z "${S3_BUCKET:-}" ]; then
    return 0
  fi

  echo -e "${GREEN}Uploading to S3...${NC}"

  local archive="${BACKUP_DIR}/databases_${TIMESTAMP}.tar.gz"
  local dest="s3://${S3_BUCKET}/backups/databases/"

  if command -v aws &>/dev/null; then
    aws s3 cp "$archive" "$dest"
    echo -e "  ${GREEN}✓ Uploaded to ${dest}${NC}"
  elif command -v mc &>/dev/null; then
    mc cp "$archive" "${S3_ALIAS:-minio}/${S3_BUCKET}/backups/databases/"
    echo -e "  ${GREEN}✓ Uploaded to MinIO${NC}"
  else
    echo -e "${YELLOW}  Warning: No S3 client found (aws-cli or mc)${NC}"
  fi
}

# -----------------------------------------------------------------------------
# Send notification
# -----------------------------------------------------------------------------
# Delegates to scripts/notify.sh when it exists; silently does nothing when it
# doesn't, and never propagates a notifier failure.
# Arguments: $1 - ntfy priority/status string, $2 - human-readable message
send_notification() {
  local status="$1"
  local message="$2"

  local notifier="$SCRIPT_DIR/notify.sh"
  [ -f "$notifier" ] || return 0

  export NTFY_URL="${NTFY_URL:-https://ntfy.${DOMAIN:-localhost}}"
  bash "$notifier" backups "Database Backup ${status}" "$message" "${status}" 2>/dev/null || true
}

# -----------------------------------------------------------------------------
# Main
# -----------------------------------------------------------------------------
# Runs every backup, archives the results, prunes old archives, optionally
# uploads, then reports the outcome via terminal and notification.
main() {
  local errors=0
  local task

  # Each backup is independent: a failure is tallied, not fatal.
  # errors=$((...)) is used (not ((errors++))) so set -e is never tripped.
  for task in backup_postgres backup_redis backup_mariadb; do
    "$task" || errors=$((errors + 1))
  done

  create_archive
  cleanup_old_backups
  upload_to_s3

  echo ""
  if [ "$errors" -eq 0 ]; then
    echo -e "${GREEN}=== Backup Complete ===${NC}"
    send_notification "default" "All databases backed up successfully"
  else
    echo -e "${YELLOW}=== Backup Complete with $errors error(s) ===${NC}"
    send_notification "high" "Database backup completed with $errors error(s)"
  fi
}

main
Loading