Files
your username 237c7802e5
Some checks failed
Deploy / deploy (push) Has been cancelled
Initial commit: Telegram Management System
Full-stack web application for Telegram management
- Frontend: Vue 3 + Vben Admin
- Backend: NestJS
- Features: User management, group broadcast, statistics

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-04 15:37:50 +08:00

360 lines
10 KiB
Bash
Executable File
Raw Permalink Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

#!/bin/bash
# Telegram Marketing Agent System - Backup Script
# Automated backup for databases and configurations
#
# Usage:
#   ./backup.sh              create a new backup
#   ./backup.sh restore <f>  restore databases/configs from backup file <f>
#   ./backup.sh verify <f>   check backup file <f> integrity
#
# Optional env var: AWS_S3_BUCKET - when set, backups are also mirrored to S3.

# Abort on the first failing command.
set -e

# Configuration (constants; readonly guards against accidental reassignment)
readonly BACKUP_DIR="/backup/marketing-agent"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)   # e.g. 20251104_153750
readonly TIMESTAMP
readonly BACKUP_PATH="$BACKUP_DIR/$TIMESTAMP"
readonly RETENTION_DAYS=30         # days to keep local and S3 backups

# ANSI color codes for console output
readonly GREEN='\033[0;32m'
readonly RED='\033[0;31m'
readonly YELLOW='\033[1;33m'       # currently unused; kept for future use
readonly NC='\033[0m'              # reset
# Output helpers

# Print a success message in green on stdout.
print_success() {
  echo -e "${GREEN}$1${NC}"
}

# Print an error message in red on stderr.
# (Fix: errors previously went to stdout, polluting captured output.)
print_error() {
  echo -e "${RED}$1${NC}" >&2
}

# Print an informational message, indented one space, no color.
print_info() {
  echo -e " $1"
}
# Create the timestamped backup directory tree, one subdirectory per
# data source (databases, configuration, logs).
create_backup_dir() {
  local sub
  for sub in mongodb postgres redis config logs; do
    mkdir -p "$BACKUP_PATH/$sub"
  done
  print_success "Created backup directory: $BACKUP_PATH"
}
# Backup MongoDB
# Dump the marketing_agent database (gzip-compressed) inside the container,
# copy the dump out into the backup tree, then remove the container-side copy.
backup_mongodb() {
print_info "Backing up MongoDB..."
docker exec marketing_mongodb mongodump \
--uri="mongodb://localhost:27017/marketing_agent" \
--out=/tmp/mongodb_backup \
--gzip
# Pull the dump onto the host, then clean up inside the container.
docker cp marketing_mongodb:/tmp/mongodb_backup "$BACKUP_PATH/mongodb/"
docker exec marketing_mongodb rm -rf /tmp/mongodb_backup
print_success "MongoDB backup completed"
}
# Backup PostgreSQL
# Dump the marketing_agent database with pg_dump (--clean --if-exists so the
# dump can later be replayed over an existing schema), copy the SQL file out
# of the container, and gzip it on the host.
backup_postgres() {
print_info "Backing up PostgreSQL..."
docker exec marketing_postgres pg_dump \
-U marketing_user \
-d marketing_agent \
--clean \
--if-exists \
-f /tmp/postgres_backup.sql
docker cp marketing_postgres:/tmp/postgres_backup.sql "$BACKUP_PATH/postgres/"
docker exec marketing_postgres rm /tmp/postgres_backup.sql
# Compress the dump
gzip "$BACKUP_PATH/postgres/postgres_backup.sql"
print_success "PostgreSQL backup completed"
}
# Backup Redis
# Trigger a background save and copy the resulting dump.rdb snapshot.
# Fix: the original slept a fixed 5 seconds after BGSAVE, which risks copying
# a half-written dump on a large dataset. We instead poll LASTSAVE (the UNIX
# timestamp of the last successful save) until it advances, up to ~60s.
backup_redis() {
  print_info "Backing up Redis..."
  local before after tries
  before=$(docker exec marketing_redis redis-cli LASTSAVE)
  docker exec marketing_redis redis-cli BGSAVE
  for ((tries = 0; tries < 60; tries++)); do
    after=$(docker exec marketing_redis redis-cli LASTSAVE)
    [ "$after" != "$before" ] && break
    sleep 1
  done
  docker cp marketing_redis:/data/dump.rdb "$BACKUP_PATH/redis/"
  print_success "Redis backup completed"
}
# Backup configuration files
# Copy environment and compose files plus each service's package manifest
# and src/config directory into the backup tree. Optional files (.env,
# prod compose file, per-service extras) are skipped silently when absent.
backup_configs() {
  print_info "Backing up configuration files..."
  # Top-level configuration files.
  cp .env "$BACKUP_PATH/config/" 2>/dev/null || true
  cp docker-compose.yml "$BACKUP_PATH/config/"
  cp docker-compose.prod.yml "$BACKUP_PATH/config/" 2>/dev/null || true
  # Per-service configuration.
  local svc_dir svc
  for svc_dir in services/*; do
    [ -d "$svc_dir" ] || continue
    svc=${svc_dir##*/}
    mkdir -p "$BACKUP_PATH/config/services/$svc"
    cp "$svc_dir/package.json" "$BACKUP_PATH/config/services/$svc/" 2>/dev/null || true
    cp -r "$svc_dir/src/config" "$BACKUP_PATH/config/services/$svc/" 2>/dev/null || true
  done
  print_success "Configuration backup completed"
}
# Backup logs
# Capture stdout+stderr logs from every compose-managed container into
# per-container files, then bundle them as logs.tar.gz and remove the
# uncompressed copies.
backup_logs() {
print_info "Backing up logs..."
# Get logs from containers
for container in $(docker-compose ps -q); do
# Container name minus the leading "/marketing_" prefix, e.g. "mongodb".
container_name=$(docker inspect -f '{{.Name}}' "$container" | sed 's/^\/marketing_//')
docker logs "$container" > "$BACKUP_PATH/logs/${container_name}.log" 2>&1
done
# Compress logs
tar -czf "$BACKUP_PATH/logs.tar.gz" -C "$BACKUP_PATH" logs/
rm -rf "$BACKUP_PATH/logs"
print_success "Logs backup completed"
}
# Create backup metadata
# Write backup_metadata.json describing this backup: timestamp, git revision
# (or 'unknown' outside a repo), service versions queried from the running
# containers, and the docker-compose container list (requires jq).
# NOTE: everything between <<EOF and EOF is heredoc content with command
# substitution, not script code - do not add comments inside it.
create_metadata() {
cat > "$BACKUP_PATH/backup_metadata.json" <<EOF
{
"timestamp": "$TIMESTAMP",
"date": "$(date)",
"version": "$(git rev-parse HEAD 2>/dev/null || echo 'unknown')",
"services": {
"mongodb": "$(docker exec marketing_mongodb mongod --version | head -n1)",
"postgres": "$(docker exec marketing_postgres postgres --version)",
"redis": "$(docker exec marketing_redis redis-server --version | head -n1)"
},
"containers": $(docker-compose ps --format json | jq -s '.')
}
EOF
print_success "Backup metadata created"
}
# Compress backup
# Pack the timestamped backup directory into a single tarball in BACKUP_DIR.
# Fix: use `tar -C` instead of `cd "$BACKUP_DIR"` so the script's working
# directory is not permanently changed as a side effect for later functions.
compress_backup() {
  print_info "Compressing backup..."
  local archive="$BACKUP_DIR/marketing-agent-backup-$TIMESTAMP.tar.gz"
  tar -czf "$archive" -C "$BACKUP_DIR" "$TIMESTAMP/"
  # Report the compressed size (SIZE intentionally global, as before).
  SIZE=$(du -h "$archive" | cut -f1)
  print_success "Backup compressed: marketing-agent-backup-$TIMESTAMP.tar.gz ($SIZE)"
}
# Upload to S3 (optional)
# Upload the compressed backup when AWS_S3_BUCKET is configured; no-op
# otherwise. STANDARD_IA keeps storage costs low for rarely-read archives.
# The ${VAR:-} default makes the check safe even if `set -u` is ever enabled.
upload_to_s3() {
  if [ -n "${AWS_S3_BUCKET:-}" ]; then
    print_info "Uploading to S3..."
    aws s3 cp "$BACKUP_DIR/marketing-agent-backup-$TIMESTAMP.tar.gz" \
      "s3://$AWS_S3_BUCKET/backups/" \
      --storage-class STANDARD_IA
    print_success "Backup uploaded to S3"
  fi
}
# Clean old backups
# Delete local archives and leftover snapshot directories older than
# RETENTION_DAYS, then prune S3 objects past retention when configured.
clean_old_backups() {
  print_info "Cleaning old backups..."
  # Remove local backups older than the retention period.
  find "$BACKUP_DIR" -name "marketing-agent-backup-*.tar.gz" -mtime +"$RETENTION_DAYS" -delete
  find "$BACKUP_DIR" -maxdepth 1 -type d -name "20*" -mtime +"$RETENTION_DAYS" -exec rm -rf {} \;
  # Clean S3 backups if configured. Parses `aws s3 ls` lines of the form
  # "date time size name". NOTE: `date -d` is GNU-specific (Linux only).
  if [ -n "${AWS_S3_BUCKET:-}" ]; then
    local cutoff line entry_date file_name
    # Hoisted out of the loop: the cutoff is loop-invariant.
    cutoff=$(date -d "$RETENTION_DAYS days ago" +%s)
    aws s3 ls "s3://$AWS_S3_BUCKET/backups/" | \
    while read -r line; do
      entry_date=$(echo "$line" | awk '{print $1" "$2}')
      entry_date=$(date -d "$entry_date" +%s)
      file_name=$(echo "$line" | awk '{print $4}')
      # Skip directory-summary lines (no filename in column 4).
      if [[ -n "$file_name" && "$entry_date" -lt "$cutoff" ]]; then
        aws s3 rm "s3://$AWS_S3_BUCKET/backups/$file_name"
      fi
    done
  fi
  print_success "Old backups cleaned"
}
# Restore from backup
# Restore databases and data from a compressed backup archive.
# Usage: $0 restore <backup-file>
# Stops the stack, restores each datastore present in the archive (MongoDB
# via mongorestore --drop, PostgreSQL via psql, Redis by replacing dump.rdb
# while its container is stopped), then restarts all services.
restore_backup() {
BACKUP_FILE=$1
if [ -z "$BACKUP_FILE" ]; then
echo "Usage: $0 restore <backup-file>"
exit 1
fi
if [ ! -f "$BACKUP_FILE" ]; then
print_error "Backup file not found: $BACKUP_FILE"
exit 1
fi
print_info "Restoring from backup: $BACKUP_FILE"
# Extract backup
RESTORE_DIR="/tmp/restore_$(date +%s)"
mkdir -p "$RESTORE_DIR"
tar -xzf "$BACKUP_FILE" -C "$RESTORE_DIR"
# Find the extracted directory (the archive contains one timestamped dir)
BACKUP_CONTENT=$(ls "$RESTORE_DIR")
# Stop services
print_info "Stopping services..."
docker-compose stop
# Restore MongoDB
if [ -d "$RESTORE_DIR/$BACKUP_CONTENT/mongodb" ]; then
print_info "Restoring MongoDB..."
docker-compose up -d mongodb
# NOTE(review): fixed 10s wait assumes mongod is accepting connections by
# then - confirm on slow hosts.
sleep 10
docker cp "$RESTORE_DIR/$BACKUP_CONTENT/mongodb" marketing_mongodb:/tmp/
docker exec marketing_mongodb mongorestore \
--uri="mongodb://localhost:27017/marketing_agent" \
--drop \
--gzip \
/tmp/mongodb/marketing_agent
print_success "MongoDB restored"
fi
# Restore PostgreSQL
if [ -f "$RESTORE_DIR/$BACKUP_CONTENT/postgres/postgres_backup.sql.gz" ]; then
print_info "Restoring PostgreSQL..."
docker-compose up -d postgres
sleep 10
# Decompress on the host, then replay the SQL inside the container.
gunzip -c "$RESTORE_DIR/$BACKUP_CONTENT/postgres/postgres_backup.sql.gz" > "$RESTORE_DIR/postgres_backup.sql"
docker cp "$RESTORE_DIR/postgres_backup.sql" marketing_postgres:/tmp/
docker exec marketing_postgres psql -U marketing_user -d marketing_agent -f /tmp/postgres_backup.sql
print_success "PostgreSQL restored"
fi
# Restore Redis
# Redis must be stopped so it cannot overwrite the restored dump on save.
if [ -f "$RESTORE_DIR/$BACKUP_CONTENT/redis/dump.rdb" ]; then
print_info "Restoring Redis..."
docker-compose stop redis
docker cp "$RESTORE_DIR/$BACKUP_CONTENT/redis/dump.rdb" marketing_redis:/data/
docker-compose up -d redis
print_success "Redis restored"
fi
# Clean up
rm -rf "$RESTORE_DIR"
# Start all services
print_info "Starting all services..."
docker-compose up -d
print_success "Restore completed successfully"
}
# Verify backup
# Check the integrity of a backup archive, list its contents and display
# the embedded metadata (requires jq) when present.
# Usage: $0 verify <backup-file>
verify_backup() {
  BACKUP_FILE=$1
  if [ -z "$BACKUP_FILE" ]; then
    echo "Usage: $0 verify <backup-file>"
    exit 1
  fi
  if [ ! -f "$BACKUP_FILE" ]; then
    print_error "Backup file not found: $BACKUP_FILE"
    exit 1
  fi
  print_info "Verifying backup: $BACKUP_FILE"
  # Test extraction inside the `if` condition. Fix: the original ran
  # `tar ...` then checked `$?`, but under `set -e` a corrupt archive
  # aborted the script before the check, so the "corrupted" branch was
  # unreachable.
  if tar -tzf "$BACKUP_FILE" > /dev/null 2>&1; then
    print_success "Backup file is valid"
    # Show contents
    echo ""
    echo "Backup contents:"
    tar -tzf "$BACKUP_FILE" | head -20
    echo "..."
    # Show metadata if it exists. `|| true`: an archive without metadata
    # makes tar fail, which must not abort the script under `set -e`.
    TEMP_DIR="/tmp/verify_$(date +%s)"
    mkdir -p "$TEMP_DIR"
    tar -xzf "$BACKUP_FILE" -C "$TEMP_DIR" --wildcards "*/backup_metadata.json" 2>/dev/null || true
    # Iterate the glob rather than `[ -f dir/*/file ]`, which breaks when
    # the pattern matches more than one path.
    local meta
    for meta in "$TEMP_DIR"/*/backup_metadata.json; do
      if [ -f "$meta" ]; then
        echo ""
        echo "Backup metadata:"
        jq '.' "$meta"
      fi
    done
    rm -rf "$TEMP_DIR"
  else
    print_error "Backup file is corrupted"
    exit 1
  fi
}
# Main execution
# Dispatch on the first argument: "restore"/"verify" take a backup file,
# no argument runs a full backup, anything else prints usage and exits 1.
case "$1" in
"restore")
restore_backup "$2"
;;
"verify")
verify_backup "$2"
;;
"")
# Normal backup operation: dump each datastore, collect configs, logs and
# metadata, compress, optionally upload to S3, then prune old backups.
echo "Starting backup process..."
echo "========================="
create_backup_dir
backup_mongodb
backup_postgres
backup_redis
backup_configs
backup_logs
create_metadata
compress_backup
upload_to_s3
clean_old_backups
# Remove uncompressed backup (only the .tar.gz is kept)
rm -rf "$BACKUP_PATH"
echo ""
echo "========================="
print_success "Backup completed successfully!"
echo "Backup file: $BACKUP_DIR/marketing-agent-backup-$TIMESTAMP.tar.gz"
;;
*)
echo "Usage: $0 [restore|verify] [backup-file]"
echo ""
echo "Commands:"
echo " $0 Create a new backup"
echo " $0 restore <file> Restore from backup file"
echo " $0 verify <file> Verify backup file integrity"
exit 1
;;
esac