Files
wiki_crawler/backend/mcp_server.py

64 lines
2.1 KiB
Python

import sys
import os
import asyncio
# Path compatibility: make the project root importable when this file is run
# directly as a script (two levels up from backend/mcp_server.py).
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from backend.core.logger import setup_logging
# Initialize logging (must run before FastMCP is initialized)
setup_logging()
from mcp.server.fastmcp import FastMCP
from backend.services.crawler_service import crawler_service
# Named MCP server instance; tools below register themselves on it.
mcp = FastMCP("WikiCrawler-V3")
@mcp.tool()
async def kb_add_website(url: str) -> str:
    """[Admin] Add a website map task."""
    # Any failure from the crawler service is reported back as text rather
    # than raised, so the MCP client always receives a readable reply.
    try:
        registration = crawler_service.map_site(url)
    except Exception as exc:
        return f"Error: {exc}"
    return f"Task Registered. ID: {registration['task_id']}, Links Found: {registration['count']}"
@mcp.tool()
async def kb_check_status(task_id: int) -> str:
    """[Monitor] Check detailed progress and active threads."""
    data = crawler_service.get_task_status(task_id)
    # Guard clause: unknown task id yields a plain-text notice.
    if not data:
        return "Task not found."
    stats = data['stats']
    active = data['active_threads']
    report = (
        f"Progress: {stats['completed']}/{stats['total']} (Pending: {stats['pending']})\n"
        f"Active Threads: {len(active)}\n"
    )
    if active:
        # Show at most the first five in-flight URLs/threads.
        listing = "\n".join(f"- {entry}" for entry in active[:5])
        report += "Currently Crawling:\n" + listing
    return report
@mcp.tool()
async def kb_run_crawler(task_id: int, batch_size: int = 5) -> str:
    """[Action] Trigger crawler batch."""
    # Call the service synchronously through MCP so the caller gets
    # direct feedback when the batch completes.
    outcome = crawler_service.process_queue_concurrent(task_id, batch_size)
    processed = outcome.get('count', 0)
    return f"Batch Finished. Count: {processed}"
@mcp.tool()
async def kb_search(query: str, task_id: int | None = None) -> str:
    """[User] Search knowledge base."""
    # NOTE: annotation fixed from `task_id: int = None` — an implicit-Optional
    # signature disallowed by PEP 484, and one that makes FastMCP derive a
    # non-nullable parameter schema for this tool. Behavior is unchanged.
    res = crawler_service.search(query, task_id, 5)
    results = res.get('results', [])
    if not results:
        return "No results."
    output = []
    # enumerate(..., 1) produces the 1-based result index directly.
    for i, r in enumerate(results, 1):
        score_display = f"{r['score']:.4f}" + (" (Reranked)" if r.get('reranked') else "")
        meta = r.get('meta_info', {})
        path = meta.get('header_path', 'Root')
        # Truncate each hit's content to 200 characters for a compact reply.
        output.append(f"[{i}] Score: {score_display}\nPath: {path}\nContent: {r['content'][:200]}...")
    return "\n\n".join(output)
# Entry point: start the MCP server when this module is executed directly.
if __name__ == "__main__":
    mcp.run()