telegram-customer-bot/database.py.bak
#!/usr/bin/env python3
"""
Database management module - SQLite cache system
"""
import sqlite3
import json
import hashlib
from datetime import datetime, timedelta
from typing import Optional, List, Dict
import logging

logger = logging.getLogger(__name__)

class CacheDatabase:
    """Cache database manager."""

    def __init__(self, db_path="/home/atai/bot_data/cache.db"):
        self.db_path = db_path
        self.init_database()

    def init_database(self):
        """Initialize the database tables."""
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        # Create the cache table
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS search_cache (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                command TEXT NOT NULL,
                keyword TEXT NOT NULL,
                page INTEGER NOT NULL,
                result_text TEXT,
                result_html TEXT,
                buttons_json TEXT,
                result_hash TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                expires_at TIMESTAMP,
                access_count INTEGER DEFAULT 0,
                last_accessed TIMESTAMP
            )
        """)
        # Unique index (prevents duplicate entries)
        cursor.execute("""
            CREATE UNIQUE INDEX IF NOT EXISTS idx_unique_cache
            ON search_cache(command, keyword, page, result_hash)
        """)
        # Lookup index
        cursor.execute("""
            CREATE INDEX IF NOT EXISTS idx_search
            ON search_cache(command, keyword, page)
        """)
        # Expiry-time index
        cursor.execute("""
            CREATE INDEX IF NOT EXISTS idx_expires
            ON search_cache(expires_at)
        """)
        conn.commit()
        conn.close()
        logger.info("Database initialized")
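    # Quick sanity-check sketch (hypothetical; reads back the schema created above):
    #   with sqlite3.connect(db.db_path) as conn:
    #       print(conn.execute(
    #           "SELECT sql FROM sqlite_master WHERE name = 'search_cache'"
    #       ).fetchone()[0])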
    def get_cache(self, command: str, keyword: str, page: int) -> Optional[Dict]:
        """
        Fetch a cached result.
        Returns {"text": str, "html": str, "buttons": list} or None.
        """
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        cursor.execute("""
            SELECT result_text, result_html, buttons_json, id
            FROM search_cache
            WHERE command = ? AND keyword = ? AND page = ?
              AND (expires_at IS NULL OR expires_at > ?)
            ORDER BY created_at DESC
            LIMIT 1
        """, (command, keyword, page, datetime.now()))
        row = cursor.fetchone()
        if row:
            # Update access statistics
            cursor.execute("""
                UPDATE search_cache
                SET access_count = access_count + 1,
                    last_accessed = ?
                WHERE id = ?
            """, (datetime.now(), row[3]))
            conn.commit()
            result = {
                "text": row[0],
                "html": row[1],
                "buttons": json.loads(row[2]) if row[2] else None
            }
            conn.close()
            logger.info(f"Cache hit: {command} {keyword} page{page}")
            return result
        conn.close()
        return None
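    # Usage sketch (hypothetical values; `db` is a CacheDatabase instance and
    # `send` stands in for whatever delivers the reply - not part of this module):
    #   cached = db.get_cache("search", "python tutorial", 1)
    #   if cached:
    #       send(cached["html"] or cached["text"], buttons=cached["buttons"])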
    def save_cache(self, command: str, keyword: str, page: int,
                   result_text: str, buttons_or_html=None,
                   buttons: list = None, expiry_days: int = 30):
        """
        Save a result to the cache - supports both calling conventions.
        """
        # Legacy call style: save_cache(cmd, keyword, page, text, buttons)
        if isinstance(buttons_or_html, list):
            buttons = buttons_or_html
            result_html = None
        else:
            result_html = buttons_or_html
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        # Hash the result text (used for deduplication)
        result_hash = hashlib.md5(result_text.encode()).hexdigest()
        # Compute the expiry time
        expires_at = datetime.now() + timedelta(days=expiry_days)
        # Serialize the buttons as JSON
        buttons_json = json.dumps(buttons, ensure_ascii=False) if buttons else None
        try:
            # INSERT OR REPLACE overwrites rows that collide on idx_unique_cache,
            # so IntegrityError is only expected for other constraint failures.
            cursor.execute("""
                INSERT OR REPLACE INTO search_cache
                (command, keyword, page, result_text, result_html, buttons_json,
                 result_hash, expires_at, last_accessed)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            """, (command, keyword, page, result_text, result_html, buttons_json,
                  result_hash, expires_at, datetime.now()))
            conn.commit()
            logger.info(f"Cache saved: {command} {keyword} page{page}")
            return True
        except sqlite3.IntegrityError as e:
            logger.debug(f"Cache entry already exists (deduplicated): {e}")
            return False
        finally:
            conn.close()
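    # Both supported call styles, sketched with hypothetical values:
    #   legacy:  db.save_cache("search", "python", 1, text, buttons)
    #   current: db.save_cache("search", "python", 1, text, html, buttons, expiry_days=7)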
    def clean_expired(self):
        """Delete expired cache entries."""
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        cursor.execute("""
            DELETE FROM search_cache
            WHERE expires_at IS NOT NULL AND expires_at < ?
        """, (datetime.now(),))
        deleted = cursor.rowcount
        conn.commit()
        conn.close()
        if deleted > 0:
            logger.info(f"Expired cache entries removed: {deleted}")
        return deleted
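    # Cleanup is not scheduled by this module; a caller might run it once at
    # startup and then periodically, e.g. via a python-telegram-bot-style job
    # queue (hypothetical wiring, `job_queue` is not defined here):
    #   db.clean_expired()
    #   job_queue.run_repeating(lambda ctx: db.clean_expired(), interval=86400)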
    def get_stats(self) -> Dict:
        """Return cache statistics."""
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        # Total number of cached entries
        cursor.execute("SELECT COUNT(*) FROM search_cache")
        total = cursor.fetchone()[0]
        # Number of still-valid entries
        cursor.execute("""
            SELECT COUNT(*) FROM search_cache
            WHERE expires_at IS NULL OR expires_at > ?
        """, (datetime.now(),))
        valid = cursor.fetchone()[0]
        # Number of expired entries
        expired = total - valid
        # Most frequently accessed entries
        cursor.execute("""
            SELECT command, keyword, access_count
            FROM search_cache
            ORDER BY access_count DESC
            LIMIT 10
        """)
        top_accessed = cursor.fetchall()
        conn.close()
        return {
            "total": total,
            "valid": valid,
            "expired": expired,
            "top_accessed": [
                {"command": row[0], "keyword": row[1], "count": row[2]}
                for row in top_accessed
            ]
        }
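    # Sketch of a stats report built on get_stats() (hypothetical formatting):
    #   stats = db.get_stats()
    #   report = [f"cached: {stats['total']} (valid {stats['valid']}, expired {stats['expired']})"]
    #   report += [f"{t['command']} {t['keyword']}: {t['count']} hits"
    #              for t in stats["top_accessed"]]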
    def search_history(self, keyword: str, limit: int = 20) -> List[Dict]:
        """Search the cache history."""
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        cursor.execute("""
            SELECT DISTINCT command, keyword, MAX(page) as max_page,
                   COUNT(*) as page_count, MAX(created_at) as latest
            FROM search_cache
            WHERE keyword LIKE ?
            GROUP BY command, keyword
            ORDER BY latest DESC
            LIMIT ?
        """, (f"%{keyword}%", limit))
        results = []
        for row in cursor.fetchall():
            results.append({
                "command": row[0],
                "keyword": row[1],
                "total_pages": row[2],
                "cached_pages": row[3],
                "latest_update": row[4]
            })
        conn.close()
        return results
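
# Minimal self-test sketch, assuming a writable temporary path instead of the
# hard-coded default /home/atai/bot_data/cache.db; all values are placeholders.
if __name__ == "__main__":
    import os
    import tempfile

    logging.basicConfig(level=logging.INFO)
    demo_db = CacheDatabase(db_path=os.path.join(tempfile.gettempdir(), "cache_demo.db"))

    # Save one page of fake search results, then read it back and inspect the cache.
    demo_db.save_cache("search", "python", 1, "demo results", [["Next page"]])
    print(demo_db.get_cache("search", "python", 1))
    print(demo_db.get_stats())
    print(demo_db.search_history("py"))
    print("expired rows removed:", demo_db.clean_expired())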