Files
wiki_crawler/backend/database.py
2025-12-20 17:08:54 +08:00

22 lines
823 B
Python

import logging

from sqlalchemy import create_engine, MetaData, Table

from .config import settings
class Database:
    """Holds the SQLAlchemy engine and reflected table handles for the crawler.

    Attributes:
        engine:  SQLAlchemy engine built from ``settings.DATABASE_URL``.
        metadata: MetaData collection the reflected tables are registered in.
        tasks / queue / chunks: reflected ``Table`` objects, or ``None`` if
            reflection failed (callers must check before use).
    """

    def __init__(self):
        # pool_pre_ping=True probes each pooled connection before handing it
        # out, avoiding stale-connection errors after idle periods.
        self.engine = create_engine(settings.DATABASE_URL, pool_pre_ping=True)
        self.metadata = MetaData()
        # Remain None when reflection fails — see _reflect_tables().
        self.tasks = None
        self.queue = None
        self.chunks = None
        self._reflect_tables()

    def _reflect_tables(self):
        """Reflect table definitions from the live database.

        Best-effort: on any failure the error is logged and the table
        attributes are left as ``None`` rather than raising, preserving the
        original non-fatal startup behavior.
        """
        try:
            # Auto-load the table structures from the database schema.
            self.tasks = Table('crawl_tasks', self.metadata, autoload_with=self.engine)
            self.queue = Table('crawl_queue', self.metadata, autoload_with=self.engine)
            self.chunks = Table('knowledge_chunks', self.metadata, autoload_with=self.engine)
        except Exception:
            # Use the module logger (with traceback) instead of print so the
            # failure is visible in application logs, not just stdout.
            logging.getLogger(__name__).exception("Failed to reflect database tables")
db_instance = Database()