Initial commit

This commit is contained in:
Janus C. H. Knudsen 2026-02-03 00:17:08 +01:00
commit 77d35ff965
51 changed files with 5591 additions and 0 deletions

73
Scripts/backup-logs.sql Normal file
View file

@@ -0,0 +1,73 @@
-- Backup Logs Database Schema
-- PostgreSQL schema for tracking backup operations.
-- IF NOT EXISTS makes the migration safe to re-run.
CREATE TABLE IF NOT EXISTS backup_logs (
    id SERIAL PRIMARY KEY,

    -- Timing
    started_at TIMESTAMP NOT NULL DEFAULT NOW(),
    completed_at TIMESTAMP,              -- NULL while the backup is still running
    duration_ms INTEGER,

    -- Identification
    backup_type VARCHAR(50) NOT NULL,    -- 'forgejo_repos', 'postgres_db', etc.
    source_name VARCHAR(255) NOT NULL,   -- repository name or database name
    source_path VARCHAR(500),            -- e.g. /var/lib/forgejo/repositories/user/repo.git

    -- Destination
    destination VARCHAR(50) NOT NULL,    -- 'azure_blob', 's3', 'local', 'sftp'
    remote_path VARCHAR(500),            -- full remote URL of the uploaded artifact

    -- Result
    status VARCHAR(20) NOT NULL,         -- constrained below
    size_bytes BIGINT,
    file_count INTEGER,                  -- number of files in the backup archive
    -- Error handling
    error_message TEXT,
    error_code VARCHAR(50),              -- 'AZURE_UPLOAD_FAILED', 'DISK_FULL', 'TAR_FAILED', etc.
    retry_count INTEGER DEFAULT 0,

    -- Metadata
    hostname VARCHAR(100),               -- server that ran the backup
    script_version VARCHAR(20),          -- version of the backup script
    checksum VARCHAR(64),                -- SHA-256 of the backup file
    created_at TIMESTAMP DEFAULT NOW(),

    -- Reject unknown states instead of letting typos accumulate in a
    -- free-text column; these are the only values the backup script writes.
    CONSTRAINT backup_logs_status_check
        CHECK (status IN ('running', 'success', 'failed', 'partial'))
);
-- Performance indexes (IF NOT EXISTS keeps the migration re-runnable)
CREATE INDEX IF NOT EXISTS idx_backup_logs_started ON backup_logs(started_at DESC);
CREATE INDEX IF NOT EXISTS idx_backup_logs_type ON backup_logs(backup_type);
CREATE INDEX IF NOT EXISTS idx_backup_logs_status ON backup_logs(status);
CREATE INDEX IF NOT EXISTS idx_backup_logs_source ON backup_logs(source_name);
-- Composite index for "latest backups per repository" summary queries.
-- NOTE(review): this makes idx_backup_logs_source largely redundant (same
-- leading column); kept for compatibility, consider dropping it later.
CREATE INDEX IF NOT EXISTS idx_backup_logs_source_started ON backup_logs(source_name, started_at DESC);
-- Per-(source, type) roll-up of backup history: counts, last run times,
-- and total stored bytes. Optional convenience view for dashboards/reports.
CREATE OR REPLACE VIEW backup_repository_summary AS
SELECT
    source_name,
    backup_type,
    -- Overall and per-outcome run counts
    COUNT(*) AS total_backups,
    COUNT(*) FILTER (WHERE status = 'success') AS successful_backups,
    COUNT(*) FILTER (WHERE status = 'failed') AS failed_backups,
    -- Most recent attempt vs. most recent known-good backup
    MAX(started_at) AS last_backup,
    MAX(started_at) FILTER (WHERE status = 'success') AS last_successful_backup,
    -- SUM over zero matching rows is NULL, so default it to 0
    COALESCE(SUM(size_bytes) FILTER (WHERE status = 'success'), 0) AS total_size_bytes
FROM backup_logs
GROUP BY
    source_name,
    backup_type;
-- Sample data for testing (optional)
-- INSERT INTO backup_logs (backup_type, source_name, source_path, destination, remote_path, status, size_bytes, hostname, script_version)
-- VALUES ('forgejo_repos', 'myorg/myrepo', '/var/lib/forgejo/data/forgejo-repositories/myorg/myrepo.git', 'azure_blob', 'https://storageptadmin.blob.core.windows.net/backups/forgejo/2024-01-31/myorg-myrepo.tar.gz', 'success', 1048576, 'forgejo-server', '2.0.0');
-- User setup for the backup script: a dedicated low-privilege role.
-- NOTE(review): replace the placeholder password before running this in any
-- environment; avoid committing real credentials to version control.
CREATE USER backup_writer WITH PASSWORD 'your_secure_password_here';
GRANT CONNECT ON DATABASE ptadmin TO backup_writer;
GRANT USAGE ON SCHEMA public TO backup_writer;
-- UPDATE is required because the script first inserts a 'running' row and
-- later updates it to success/failed (see db_update_status in the shell script).
GRANT SELECT, INSERT, UPDATE ON backup_logs TO backup_writer;
-- Sequence access so the SERIAL id column can generate values on INSERT.
GRANT USAGE, SELECT ON SEQUENCE backup_logs_id_seq TO backup_writer;

View file

@@ -0,0 +1,24 @@
# Forgejo Backup Configuration
# Copy this file to .env and update values

# Forgejo repository path
FORGEJO_REPO_PATH=/var/lib/forgejo/data/forgejo-repositories

# Temporary directory for creating archives
BACKUP_TEMP_DIR=/tmp/forgejo-backups

# PostgreSQL database for logging
BACKUP_DB_HOST=192.168.1.43
BACKUP_DB_PORT=5432
BACKUP_DB_NAME=ptadmin
# Use the dedicated low-privilege role created by Scripts/backup-logs.sql.
# (Previously 'plantempus_app', which that schema never grants access to.)
BACKUP_DB_USER=backup_writer
BACKUP_DB_PASSWORD=your_password_here

# Backup retention (days)
BACKUP_RETENTION_DAYS=30

# Azure Storage Configuration
AZURE_STORAGE_ACCOUNT=storageptadmin
AZURE_STORAGE_KEY=your_storage_key_here
AZURE_STORAGE_CONTAINER=backups
AZURE_STORAGE_PATH=forgejo

340
Scripts/forgejo-backup.sh Normal file
View file

@@ -0,0 +1,340 @@
#!/bin/bash
#
# Forgejo Repository Backup Script
# Version: 2.0.0
#
# Backs up all Forgejo git repositories to Azure Blob Storage
# and logs results to PostgreSQL.
#
# Usage: ./forgejo-backup.sh
#
# Configuration via environment variables or .env file
# Abort on any error, unset variable, or failure inside a pipeline.
set -euo pipefail
SCRIPT_VERSION="2.0.0"
# Directory this script lives in; used to locate the adjacent .env file.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
HOSTNAME=$(hostname)
# DATE names the daily blob folder; TIMESTAMP makes local archive names unique.
DATE=$(date +%Y-%m-%d)
TIMESTAMP=$(date +%Y-%m-%d_%H%M%S)
# Load configuration from .env if it exists (values below act as fallbacks).
if [[ -f "$SCRIPT_DIR/.env" ]]; then
source "$SCRIPT_DIR/.env"
fi
# Configuration with defaults
FORGEJO_REPO_PATH="${FORGEJO_REPO_PATH:-/var/lib/forgejo/data/forgejo-repositories}"
BACKUP_TEMP_DIR="${BACKUP_TEMP_DIR:-/tmp/forgejo-backups}"
BACKUP_DB_HOST="${BACKUP_DB_HOST:-localhost}"
BACKUP_DB_PORT="${BACKUP_DB_PORT:-5432}"
# NOTE(review): default db name 'plantempus' differs from the schema's grants
# (GRANT CONNECT ON DATABASE ptadmin) and the sample .env — confirm which is intended.
BACKUP_DB_NAME="${BACKUP_DB_NAME:-plantempus}"
BACKUP_DB_USER="${BACKUP_DB_USER:-backup_writer}"
BACKUP_DB_PASSWORD="${BACKUP_DB_PASSWORD:-}"
BACKUP_RETENTION_DAYS="${BACKUP_RETENTION_DAYS:-30}"
# Azure Storage Configuration
AZURE_STORAGE_ACCOUNT="${AZURE_STORAGE_ACCOUNT:-}"
AZURE_STORAGE_KEY="${AZURE_STORAGE_KEY:-}"
AZURE_STORAGE_CONTAINER="${AZURE_STORAGE_CONTAINER:-backups}"
AZURE_STORAGE_PATH="${AZURE_STORAGE_PATH:-forgejo}"
# Build Azure destination URL (base for the per-day remote paths logged to the DB).
AZURE_BLOB_URL="https://${AZURE_STORAGE_ACCOUNT}.blob.core.windows.net/${AZURE_STORAGE_CONTAINER}/${AZURE_STORAGE_PATH}"
# Timestamped logging helpers. All three share one formatter; only
# log_error writes to stderr, matching the original behavior.
_log_line() {
    local level="$1"
    shift
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] ${level}: $*"
}
log_info() {
    _log_line INFO "$@"
}
log_error() {
    _log_line ERROR "$@" >&2
}
log_warn() {
    _log_line WARN "$@"
}
# Insert a row into backup_logs describing one backup operation.
# Positional args:
#   1 backup_type   2 source_name   3 source_path   4 destination
#   5 remote_path   6 status        7 size_bytes    8 error_message
#   9 error_code   10 checksum     11 started_at   12 file_count
# Values are handed to psql as variables and referenced as :'name', so psql
# performs all SQL quoting. The previous version interpolated shell values
# directly into the SQL text and only escaped error_message — a quote in a
# repo name or path would have broken (or injected into) the statement.
db_log() {
    local backup_type="$1"
    local source_name="$2"
    local source_path="$3"
    local destination="$4"
    local remote_path="$5"
    local status="$6"
    local size_bytes="${7:-}"
    local error_message="${8:-}"
    local error_code="${9:-}"
    local checksum="${10:-}"
    local started_at="${11:-}"
    local file_count="${12:-}"

    # Compute elapsed time only for terminal states with a known start.
    local duration_ms=""
    if [[ -n "$started_at" && "$status" != "running" ]]; then
        local start_epoch
        start_epoch=$(date -d "$started_at" +%s 2>/dev/null || echo "")
        local now_epoch
        now_epoch=$(date +%s)
        if [[ -n "$start_epoch" ]]; then
            duration_ms=$(( (now_epoch - start_epoch) * 1000 ))
        fi
    fi

    local completed_at=""
    if [[ "$status" != "running" ]]; then
        completed_at=$(date '+%Y-%m-%d %H:%M:%S')
    fi

    # Quoted heredoc ('SQL') prevents shell expansion; psql substitutes the
    # -v variables. NULLIF(...,'') maps empty shell strings to SQL NULL.
    PGPASSWORD="$BACKUP_DB_PASSWORD" psql -h "$BACKUP_DB_HOST" -p "$BACKUP_DB_PORT" \
        -U "$BACKUP_DB_USER" -d "$BACKUP_DB_NAME" -q \
        -v btype="$backup_type" \
        -v sname="$source_name" \
        -v spath="$source_path" \
        -v dest="$destination" \
        -v rpath="$remote_path" \
        -v status="$status" \
        -v sbytes="$size_bytes" \
        -v fcount="$file_count" \
        -v emsg="$error_message" \
        -v ecode="$error_code" \
        -v host="$HOSTNAME" \
        -v ver="$SCRIPT_VERSION" \
        -v csum="$checksum" \
        -v sat="$started_at" \
        -v done_at="$completed_at" \
        -v dms="$duration_ms" <<'SQL'
INSERT INTO backup_logs (
    backup_type, source_name, source_path, destination, remote_path,
    status, size_bytes, file_count, error_message, error_code,
    hostname, script_version, checksum, started_at, completed_at, duration_ms
) VALUES (
    :'btype',
    :'sname',
    :'spath',
    :'dest',
    :'rpath',
    :'status',
    NULLIF(:'sbytes', '')::BIGINT,
    NULLIF(:'fcount', '')::INTEGER,
    NULLIF(:'emsg', ''),
    NULLIF(:'ecode', ''),
    :'host',
    :'ver',
    NULLIF(:'csum', ''),
    COALESCE(NULLIF(:'sat', '')::TIMESTAMP, NOW()),
    NULLIF(:'done_at', '')::TIMESTAMP,
    NULLIF(:'dms', '')::INTEGER
);
SQL
}
# Update the 'running' row created by db_log with the final outcome.
# Positional args:
#   1 source_name  2 started_at  3 status  [4 size_bytes] [5 error_message]
#   [6 error_code] [7 checksum]  [8 file_count]
# Empty optional args leave the existing column value untouched (COALESCE),
# matching the original behavior. All values go through psql -v variables so
# psql handles SQL quoting — no shell interpolation into the statement.
db_update_status() {
    local source_name="$1"
    local started_at="$2"
    local status="$3"
    local size_bytes="${4:-}"
    local error_message="${5:-}"
    local error_code="${6:-}"
    local checksum="${7:-}"
    local file_count="${8:-}"

    local duration_ms=""
    local start_epoch
    start_epoch=$(date -d "$started_at" +%s 2>/dev/null || echo "")
    if [[ -n "$start_epoch" ]]; then
        duration_ms=$(( ($(date +%s) - start_epoch) * 1000 ))
    fi

    PGPASSWORD="$BACKUP_DB_PASSWORD" psql -h "$BACKUP_DB_HOST" -p "$BACKUP_DB_PORT" \
        -U "$BACKUP_DB_USER" -d "$BACKUP_DB_NAME" -q \
        -v sname="$source_name" \
        -v sat="$started_at" \
        -v status="$status" \
        -v sbytes="$size_bytes" \
        -v fcount="$file_count" \
        -v emsg="$error_message" \
        -v ecode="$error_code" \
        -v csum="$checksum" \
        -v dms="$duration_ms" <<'SQL'
UPDATE backup_logs SET
    status = :'status',
    completed_at = NOW(),
    duration_ms = NULLIF(:'dms', '')::INTEGER,
    size_bytes = COALESCE(NULLIF(:'sbytes', '')::BIGINT, size_bytes),
    file_count = COALESCE(NULLIF(:'fcount', '')::INTEGER, file_count),
    error_message = COALESCE(NULLIF(:'emsg', ''), error_message),
    error_code = COALESCE(NULLIF(:'ecode', ''), error_code),
    checksum = COALESCE(NULLIF(:'csum', ''), checksum)
WHERE source_name = :'sname' AND started_at = :'sat'::TIMESTAMP;
SQL
}
# Upload a local file to Azure Blob Storage.
# $1: local file path
# $2: full destination blob URL (used by the azcopy fallback)
# With an account key configured, the az CLI uploads using key auth;
# otherwise azcopy reuses the current `az login` session.
# Fix: the previous key branch passed azcopy a hard-coded placeholder SAS
# token (sig=placeholder), which could never authenticate; it also ran a
# pointless `export ... 2>/dev/null || true`.
azure_upload() {
    local local_file="$1"
    local remote_path="$2"
    if [[ -n "$AZURE_STORAGE_KEY" ]]; then
        az storage blob upload \
            --account-name "$AZURE_STORAGE_ACCOUNT" \
            --account-key "$AZURE_STORAGE_KEY" \
            --container-name "$AZURE_STORAGE_CONTAINER" \
            --file "$local_file" \
            --name "${AZURE_STORAGE_PATH}/$DATE/$(basename "$local_file")" \
            --overwrite \
            2>&1
    else
        # Let azcopy authenticate through the Azure CLI's cached credentials.
        export AZCOPY_AUTO_LOGIN_TYPE=AZCLI
        azcopy copy "$local_file" "$remote_path" \
            --blob-type BlockBlob \
            --overwrite=true \
            2>&1
    fi
}
# Upload a single file to Azure Blob Storage with the az CLI using
# account-key authentication (more reliable than azcopy here).
# $1: local file path   $2: destination blob name within the container
azure_upload_az() {
    local src_file="$1"
    local dest_blob="$2"
    # Build the command as an array so every flag stays individually quoted.
    local az_cmd=(
        az storage blob upload
        --account-name "$AZURE_STORAGE_ACCOUNT"
        --account-key "$AZURE_STORAGE_KEY"
        --container-name "$AZURE_STORAGE_CONTAINER"
        --file "$src_file"
        --name "$dest_blob"
        --overwrite
        --only-show-errors
    )
    "${az_cmd[@]}" 2>&1
}
# Back up one bare git repository: archive it, checksum it, upload to Azure,
# and record the outcome via db_log/db_update_status.
# $1: absolute path to the repository's .git directory
# Returns 0 on success, 1 on failure (failures are logged, never fatal).
# Fixes: predictable /tmp/backup_error_$$ replaced with mktemp (symlink/clobber
# hazard on shared /tmp), and a failed tar no longer leaves a partial archive
# behind in BACKUP_TEMP_DIR.
backup_repo() {
    local repo_path="$1"
    local repo_name safe_name
    # Repo name relative to the root, without the trailing .git.
    repo_name=$(echo "$repo_path" | sed "s|$FORGEJO_REPO_PATH/||" | sed 's|\.git$||')
    # Flatten 'org/repo' to 'org-repo' for a filesystem/blob-safe name.
    safe_name=$(echo "$repo_name" | tr '/' '-')
    local backup_file="$BACKUP_TEMP_DIR/${safe_name}_${TIMESTAMP}.tar.gz"
    local blob_name="${AZURE_STORAGE_PATH}/$DATE/${safe_name}.tar.gz"
    local remote_path="${AZURE_BLOB_URL}/$DATE/${safe_name}.tar.gz"
    local started_at
    started_at=$(date '+%Y-%m-%d %H:%M:%S')
    local err_file
    err_file=$(mktemp)

    log_info "Backing up: $repo_name"
    # Record the attempt immediately so a crash still leaves a 'running' row.
    db_log "forgejo_repos" "$repo_name" "$repo_path" "azure_blob" "$remote_path" "running" "" "" "" "" "$started_at" ""

    # Create tar.gz archive of the bare repository.
    if ! tar -czf "$backup_file" -C "$(dirname "$repo_path")" "$(basename "$repo_path")" 2>"$err_file"; then
        local error_msg
        error_msg=$(cat "$err_file" 2>/dev/null || echo "Unknown tar error")
        rm -f "$err_file" "$backup_file"   # drop any partially-written archive
        log_error "Failed to create archive for $repo_name: $error_msg"
        db_update_status "$repo_name" "$started_at" "failed" "" "$error_msg" "TAR_FAILED" "" ""
        return 1
    fi

    # Gather archive metadata (GNU stat first, BSD stat as fallback).
    local size_bytes file_count checksum
    size_bytes=$(stat -c%s "$backup_file" 2>/dev/null || stat -f%z "$backup_file" 2>/dev/null || echo "0")
    file_count=$(tar -tzf "$backup_file" 2>/dev/null | wc -l || echo "0")
    checksum=$(sha256sum "$backup_file" 2>/dev/null | cut -d' ' -f1 || shasum -a 256 "$backup_file" 2>/dev/null | cut -d' ' -f1 || echo "")

    # Upload to Azure Blob Storage.
    if ! azure_upload_az "$backup_file" "$blob_name" 2>"$err_file"; then
        local error_msg
        error_msg=$(cat "$err_file" 2>/dev/null || echo "Unknown Azure upload error")
        rm -f "$err_file" "$backup_file"
        log_error "Failed to upload $repo_name to Azure: $error_msg"
        db_update_status "$repo_name" "$started_at" "failed" "$size_bytes" "$error_msg" "AZURE_UPLOAD_FAILED" "" "$file_count"
        return 1
    fi

    # Clean up temp files and record success.
    rm -f "$err_file" "$backup_file"
    db_update_status "$repo_name" "$started_at" "success" "$size_bytes" "" "" "$checksum" "$file_count"
    log_info "Successfully backed up: $repo_name ($size_bytes bytes)"
    return 0
}
# Delete remote backups older than BACKUP_RETENTION_DAYS.
# Blob paths look like forgejo/2024-01-15/repo.tar.gz; the date component is
# compared lexically, which is correct for zero-padded ISO dates.
# Fix: date extraction used `grep -oP`, a GNU-only extension — inconsistent
# with the BSD `date -v` fallback below. Bash's own regex engine is portable.
cleanup_old_backups() {
    log_info "Cleaning up backups older than $BACKUP_RETENTION_DAYS days"
    # GNU date first, BSD/macOS date -v as fallback.
    local cutoff_date
    cutoff_date=$(date -d "$BACKUP_RETENTION_DAYS days ago" +%Y-%m-%d 2>/dev/null || date -v-${BACKUP_RETENTION_DAYS}d +%Y-%m-%d)
    # List all blobs under our prefix and prune by embedded date folder.
    az storage blob list \
        --account-name "$AZURE_STORAGE_ACCOUNT" \
        --account-key "$AZURE_STORAGE_KEY" \
        --container-name "$AZURE_STORAGE_CONTAINER" \
        --prefix "${AZURE_STORAGE_PATH}/" \
        --query "[].name" \
        --output tsv 2>/dev/null | while read -r blob_name; do
        # Extract the first YYYY-MM-DD occurrence with bash's [[ =~ ]].
        local blob_date=""
        if [[ "$blob_name" =~ ([0-9]{4}-[0-9]{2}-[0-9]{2}) ]]; then
            blob_date="${BASH_REMATCH[1]}"
        fi
        if [[ -n "$blob_date" && "$blob_date" < "$cutoff_date" ]]; then
            log_info "Deleting old backup: $blob_name"
            # Best-effort delete: a failure here must not abort the run.
            az storage blob delete \
                --account-name "$AZURE_STORAGE_ACCOUNT" \
                --account-key "$AZURE_STORAGE_KEY" \
                --container-name "$AZURE_STORAGE_CONTAINER" \
                --name "$blob_name" \
                --only-show-errors 2>/dev/null || true
        fi
    done
}
# Main backup function.
# Orchestrates the run: validates configuration and required tooling,
# verifies Azure connectivity, backs up every *.git directory found under
# $FORGEJO_REPO_PATH, prunes old remote backups, and exits non-zero if any
# repository failed.
main() {
log_info "Starting Forgejo backup (version $SCRIPT_VERSION)"
log_info "Repository path: $FORGEJO_REPO_PATH"
log_info "Destination: Azure Blob Storage ($AZURE_STORAGE_ACCOUNT/$AZURE_STORAGE_CONTAINER)"
# Verify configuration before doing any work.
if [[ ! -d "$FORGEJO_REPO_PATH" ]]; then
log_error "Repository path does not exist: $FORGEJO_REPO_PATH"
exit 1
fi
if [[ -z "$AZURE_STORAGE_ACCOUNT" ]]; then
log_error "AZURE_STORAGE_ACCOUNT is not set"
exit 1
fi
if [[ -z "$AZURE_STORAGE_KEY" ]]; then
log_error "AZURE_STORAGE_KEY is not set"
exit 1
fi
# Required external tools.
if ! command -v az &> /dev/null; then
log_error "Azure CLI (az) is not installed. Install with: curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash"
exit 1
fi
if ! command -v psql &> /dev/null; then
log_error "psql is not installed"
exit 1
fi
# Verify Azure connection (fail fast rather than after archiving everything).
if ! az storage container show \
--account-name "$AZURE_STORAGE_ACCOUNT" \
--account-key "$AZURE_STORAGE_KEY" \
--name "$AZURE_STORAGE_CONTAINER" \
--only-show-errors &>/dev/null; then
log_error "Cannot connect to Azure Storage container: $AZURE_STORAGE_CONTAINER"
exit 1
fi
# Create temp directory
mkdir -p "$BACKUP_TEMP_DIR"
# Find and back up all repositories.
local total=0
local success=0
local failed=0
# NUL-delimited find/read pairing is safe for paths with spaces.
# `|| true` guards against set -e: ((x++)) returns 1 when x was 0.
while IFS= read -r -d '' repo; do
((total++)) || true
if backup_repo "$repo"; then
((success++)) || true
else
((failed++)) || true
fi
done < <(find "$FORGEJO_REPO_PATH" -maxdepth 3 -type d -name "*.git" -print0 2>/dev/null)
# Prune old remote backups.
cleanup_old_backups
# Remove the temp directory; rmdir only succeeds if it is empty, which is
# intentional — leftover files indicate a failed run and are kept for triage.
rmdir "$BACKUP_TEMP_DIR" 2>/dev/null || true
# Summary
log_info "Backup complete: $total total, $success success, $failed failed"
# Non-zero exit if any repository failed, so cron/systemd can alert.
if [[ $failed -gt 0 ]]; then
exit 1
fi
}
# Run main function
main "$@"