mcp调试完成
This commit is contained in:
@@ -1,64 +1,145 @@
import sys
import os
import asyncio
import logging
import threading
from typing import Optional

# 1. Path compatibility: make the repository root importable so the
# `backend` package resolves when this file is run as a script.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from backend.core.logger import setup_logging

# 2. Initialize logging BEFORE importing/initializing FastMCP — MCP uses
# stdout for the protocol stream, so all logging must go to stderr.
setup_logging()

from mcp.server.fastmcp import FastMCP
from backend.services.crawler_service import crawler_service

logger = logging.getLogger("mcp_server")

# 3. Initialize the MCP service.
mcp = FastMCP("WikiCrawler-V3")
||||
@mcp.tool()
async def kb_add_website(url: str) -> str:
    """
    [Admin] Input a URL to map and register a task.
    This is the first step to add a knowledge base.

    Args:
        url: The root URL of the website (e.g., https://docs.firecrawl.dev).

    Returns:
        Task ID and count of found links.
    """
    try:
        res = crawler_service.map_site(url)
        # `is_new` tells the caller whether the task was freshly registered
        # or an existing one was reused.
        return f"Task Registered. ID: {res['task_id']}, Links Found: {res['count']}, Is New: {res['is_new']}"
    except Exception as e:
        logger.error(f"Add website failed: {e}", exc_info=True)
        return f"Error: {e}"
||||
@mcp.tool()
async def kb_check_status(task_id: int) -> str:
    """
    [Monitor] Check detailed progress and active threads.
    Use this to see if the crawler is still running or finished.

    Args:
        task_id: The ID of the task to check.

    Returns:
        A formatted report including progress stats and currently crawling URLs.
    """
    data = crawler_service.get_task_status(task_id)
    if not data:
        return "Task not found."

    s = data['stats']
    threads = data['active_threads']

    # Format the output for LLM consumption.
    report = (
        f"--- Task {task_id} Status ---\n"
        f"Root URL: {data['root_url']}\n"
        f"Progress: {s['completed']}/{s['total']} (Pending: {s['pending']})\n"
        f"Active Threads (Running): {len(threads)}\n"
    )

    if threads:
        # Show at most 5 in-flight URLs to keep the report compact.
        report += "Currently Crawling:\n" + "\n".join(f"- {t}" for t in threads[:5])
        if len(threads) > 5:
            report += f"\n... and {len(threads) - 5} more."

    return report
||||
@mcp.tool()
async def kb_run_crawler(task_id: int, batch_size: int = 20) -> str:
    """
    [Action] Trigger the crawler in BACKGROUND mode.
    This returns immediately, so you can use 'kb_check_status' to monitor progress.

    Args:
        task_id: The ID of the task.
        batch_size: Number of URLs to process (suggest 10-20).

    Returns:
        Status message confirming start.
    """
    # Wrapper so the blocking batch runs in its own thread and this
    # coroutine can return immediately.
    def background_task():
        try:
            logger.info(f"Background batch started for Task {task_id}")
            # Blocking call, but it now runs in a dedicated thread.
            crawler_service.process_queue_concurrent(task_id, batch_size)
            logger.info(f"Background batch finished for Task {task_id}")
        except Exception as e:
            logger.error(f"Background task failed: {e}", exc_info=True)

    # Create and start the worker as a daemon thread so it cannot block
    # interpreter exit.
    thread = threading.Thread(target=background_task, daemon=True)
    thread.start()

    # Return immediately — do not wait for the crawl to finish.
    return f"🚀 Background crawler started for Task {task_id} (Batch Size: {batch_size}). You can now check status."
|
||||
|
||||
@mcp.tool()
async def kb_search(query: str, task_id: Optional[int] = None, limit: int = 5) -> str:
    """
    [User] Search knowledge base with Hybrid Search & Rerank.

    Args:
        query: The user's question or search keywords.
        task_id: (Optional) Limit search to a specific task ID.
        limit: (Optional) Number of results to return (default 5).

    Returns:
        Ranked content blocks with source paths.
    """
    try:
        res = crawler_service.search(query, task_id, limit)
        results = res.get('results', [])

        if not results:
            return "No results found."

        output = []
        for i, r in enumerate(results):
            score_display = f"{r['score']:.4f}" + (" (Reranked)" if r.get('reranked') else "")
            meta = r.get('meta_info', {})
            path = meta.get('header_path', 'Root')

            # Format a single result block.
            block = (
                f"[{i+1}] Score: {score_display}\n"
                f"Path: {path}\n"
                f"Content: {r['content'][:300]}..."  # Cap length to avoid context overflow.
            )
            output.append(block)

        return "\n\n".join(output)

    except Exception as e:
        logger.error(f"Search failed: {e}", exc_info=True)
        return f"Search Error: {e}"
|
||||
if __name__ == "__main__":
    # Start the MCP server (stdio transport by default).
    mcp.run()
Reference in New Issue
Block a user