
Automated PostgreSQL database backup and restore

Revision be9aa43f6ee35c05919ae106253791f4855afaff

pg_backup_s3.py
#!/usr/bin/env python3
# Recommended: run this script with a scheduler such as cron (example below).
# It backs up a PostgreSQL database to S3-compatible storage.
# Edit the configuration below before use (a Python environment with the
# dependencies installed is assumed).
# What the script does:
# 1. Read configuration
# 2. Check prerequisites
# 3. Create a compressed backup
# 4. Upload the backup file to S3
# 5. Clean up old backups (a hedged retention sketch follows upload_to_s3 below)
# 6. Log progress
# 7. Handle errors
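# Scheduling example (an assumption, not part of the original file: cron is
# available and the script lives at /path/to/pg_backup_s3.py, a hypothetical path):
#   0 3 * * * /usr/bin/python3 /path/to/pg_backup_s3.py
# This runs the backup daily at 03:00.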

# Imports
import os
import subprocess
import threading  # for the upload progress callback's lock
import boto3
from botocore.exceptions import ClientError
from datetime import datetime
import logging
import gzip
import shutil
from boto3.s3.transfer import TransferConfig

# Configuration -- edit these values before use
DB_NAME = 'database_name'
DB_USER = 'database_user'
DB_PASSWORD = 'database_password'
S3_ENDPOINT = 'your_bucket_endpoint_url'
S3_ACCESS_KEY = 'your_bucket_ACCESS_KEY'
S3_SECRET_KEY = 'your_bucket_SECRET_KEY'
S3_BUCKET = 'your_bucket_name'
BACKUP_DIR = '/tmp/pg_backups'  # directory for local backup files
COMPRESS_LEVEL = 6  # gzip level (0-9): 0 = none, 9 = maximum; leave as-is if unsure
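# Optional hardening sketch (an addition, not in the original revision): read
# secrets from the environment instead of hard-coding them, e.g.
#   DB_PASSWORD = os.environ.get('PG_BACKUP_DB_PASSWORD', DB_PASSWORD)
# where PG_BACKUP_DB_PASSWORD is a variable name of your choosing.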

# Logging setup
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    handlers=[
        logging.StreamHandler(),
        logging.FileHandler('/var/log/pg_backup_compressed.log')
    ]
)
logger = logging.getLogger('PG_Backup_Compressed')

def print_step(message):
    """Print a progress message to the console."""
    print(message)

def check_prerequisites():
    """Check prerequisites: writable backup directory and pg_dump on PATH."""
    try:
        os.makedirs(BACKUP_DIR, exist_ok=True)
        # Verify the backup directory is writable
        test_file = os.path.join(BACKUP_DIR, '.test')
        with open(test_file, 'w') as f:
            f.write('test')
        os.remove(test_file)
        # Verify pg_dump is available
        subprocess.run(['pg_dump', '--version'], check=True, capture_output=True)
        return True
    except Exception as e:
        logger.error(f"Prerequisite check failed: {e}")
        return False

def create_compressed_backup():
    """Create a gzip-compressed SQL dump of the database."""
    # Include the year so filenames stay unique and sortable across years
    timestamp = datetime.now().strftime("%Y%m%d%H%M")
    sql_file = os.path.join(BACKUP_DIR, f"{DB_NAME}_backup_{timestamp}.sql")
    gz_file = f"{sql_file}.gz"

    try:
        print_step("Running pg_dump...")
        env = os.environ.copy()
        env['PGPASSWORD'] = DB_PASSWORD  # avoids an interactive password prompt
        cmd = [
            'pg_dump',
            '-U', DB_USER,
            '-h', 'localhost',
            '-d', DB_NAME,
            '-f', sql_file
        ]

        result = subprocess.run(
            cmd,
            env=env,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True
        )

        if result.returncode != 0:
            raise Exception(f"pg_dump failed: {result.stderr.strip()}")

        if not os.path.exists(sql_file):
            raise Exception("SQL dump file was not created")

        print_step("Compressing backup file...")
        with open(sql_file, 'rb') as f_in:
            with gzip.open(gz_file, 'wb', compresslevel=COMPRESS_LEVEL) as f_out:
                shutil.copyfileobj(f_in, f_out)

        os.remove(sql_file)  # keep only the compressed copy
        return gz_file

    except Exception:
        # Clean up any partial files, then re-raise for the caller to handle
        for f in [sql_file, gz_file]:
            if os.path.exists(f):
                try:
                    os.remove(f)
                except OSError:
                    pass
        raise

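# Design note: pg_dump's custom format (pg_dump -Fc) compresses natively and is
# restored with pg_restore; the plain-SQL + gzip approach above restores with
# ordinary psql instead. Either is reasonable; this file keeps the original choice.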
class ProgressPercentage:
    """Upload progress callback (locked, since boto3 may call it from worker threads)."""
    def __init__(self, filename):
        self._filename = filename
        self._size = float(os.path.getsize(filename))
        self._seen_so_far = 0
        self._lock = threading.Lock()

    def __call__(self, bytes_amount):
        with self._lock:
            self._seen_so_far += bytes_amount
            percentage = (self._seen_so_far / self._size) * 100
            print(f"\r Upload progress: {percentage:.2f}% ({self._seen_so_far/1024/1024:.2f}MB)", end='')

def upload_to_s3(file_path):
    """Upload the backup file to the S3 bucket."""
    try:
        s3 = boto3.client(
            's3',
            endpoint_url=S3_ENDPOINT,
            aws_access_key_id=S3_ACCESS_KEY,
            aws_secret_access_key=S3_SECRET_KEY,
            region_name='cn-sy1',  # adjust to your provider's region
            config=boto3.session.Config(
                signature_version='s3v4'
            )
        )

        # Multipart upload for files over 25 MB
        # (the original values, 1024*25, were 25 KB; almost certainly meant as MB)
        transfer_config = TransferConfig(
            multipart_threshold=1024 * 1024 * 25,
            max_concurrency=10,
            multipart_chunksize=1024 * 1024 * 25,
            use_threads=True
        )

        file_name = os.path.basename(file_path)
        print_step(f"Uploading {file_name}...")

        s3.upload_file(
            file_path,
            S3_BUCKET,
            file_name,
            Config=transfer_config,
            Callback=ProgressPercentage(file_path)
        )
        print()  # end the progress line

        return True
    except ClientError as e:
        logger.error(f"S3 upload failed: {e}")
        raise

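# Hedged sketch for step 5 ("clean up old backups") in the header: prune objects
# older than a retention window. cleanup_old_backups and its retention_days
# parameter are additions, not part of the original revision, and main() does
# not call this by default; wire it in after a successful upload if desired.
def cleanup_old_backups(s3_client, retention_days=7):
    """Delete objects in S3_BUCKET older than retention_days (sketch)."""
    from datetime import timedelta, timezone
    cutoff = datetime.now(timezone.utc) - timedelta(days=retention_days)
    # Note: list_objects_v2 returns at most 1000 keys per call; a paginator
    # would be needed for larger buckets (omitted in this sketch).
    resp = s3_client.list_objects_v2(Bucket=S3_BUCKET)
    for obj in resp.get('Contents', []):
        # LastModified is a timezone-aware datetime supplied by S3
        if obj['LastModified'] < cutoff:
            print_step(f"Deleting old backup {obj['Key']}...")
            s3_client.delete_object(Bucket=S3_BUCKET, Key=obj['Key'])
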
def main():
    print("\n" + "="*50)
    print("PostgreSQL compressed backup script")
    print("="*50 + "\n")

    try:
        if not check_prerequisites():
            raise Exception("Prerequisite check failed")

        backup_file = create_compressed_backup()
        if upload_to_s3(backup_file):
            os.remove(backup_file)
            print_step("Upload succeeded; local file cleaned up")

    except Exception as e:
        logger.error(f"Backup failed: {e}")
        print_step(f"[ERROR] {e}")
    finally:
        print("\n[Done]")

if __name__ == "__main__":
    main()
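
Restore sketch. The revision above implements only the backup half of the
title; nothing in it performs a restore. The following minimal sketch is an
addition, not part of the original file: it assumes the same configuration
values, that psql is installed, that the target database already exists, and
that BACKUP_GZ points at a backup already downloaded from the bucket (for
example via s3.download_file); the path shown is a placeholder.

#!/usr/bin/env python3
import gzip
import os
import shutil
import subprocess

DB_NAME = 'database_name'
DB_USER = 'database_user'
DB_PASSWORD = 'database_password'
BACKUP_GZ = '/tmp/pg_backups/database_name_backup_202501010300.sql.gz'  # placeholder

sql_file = BACKUP_GZ[:-3]  # drop the .gz suffix

# Decompress the archive back to plain SQL
with gzip.open(BACKUP_GZ, 'rb') as f_in, open(sql_file, 'wb') as f_out:
    shutil.copyfileobj(f_in, f_out)

# Replay the dump with psql; PGPASSWORD avoids an interactive prompt
env = os.environ.copy()
env['PGPASSWORD'] = DB_PASSWORD
subprocess.run(
    ['psql', '-U', DB_USER, '-h', 'localhost', '-d', DB_NAME, '-f', sql_file],
    env=env,
    check=True
)
os.remove(sql_file)
print("Restore finished")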