修改配置和response的细节

This commit is contained in:
2025-12-30 16:57:31 +08:00
parent 8972246445
commit d191b13455
5 changed files with 308 additions and 205 deletions

View File

# backend/main.py
from fastapi import FastAPI, APIRouter, BackgroundTasks
# NOTE: import paths must match the actual filenames under ./services.
from .services.crawler_sql_service import crawler_sql_service
from .services.automated_crawler import workflow
from .schemas import (
    RegisterRequest, PendingRequest, SaveResultsRequest, AddUrlsRequest, SearchRequest,
    AutoMapRequest, AutoProcessRequest, TextSearchRequest
)
from .utils import make_response

app = FastAPI(title="Wiki Crawler API")

# ==========================================
# V1 Router: low-level manual-control endpoints
# ==========================================
router_v1 = APIRouter()
@router_v1.post("/register")
async def register(req: RegisterRequest):
    """Register a crawl task for a URL and return its task info.

    Response msg comes from the service; the remaining keys become data.
    """
    try:
        # Service returns e.g. {'task_id': 1, 'is_new_task': True, 'msg': '...'}
        res = crawler_sql_service.register_task(req.url)
        # Pop 'msg' out to use as the response message; the rest is the payload.
        return make_response(1, res.pop("msg", "Success"), res)
    except Exception as e:
        return make_response(0, str(e))
async def add_urls(req: AddUrlsRequest):
    """Append URLs to an existing task's crawl queue."""
    try:
        # req.urls_obj is expected to be a dict with a 'urls' list — KeyError
        # falls through to the generic error response below.
        urls = req.urls_obj["urls"]
        res = crawler_sql_service.add_urls(req.task_id, urls=urls)
        # Service-provided 'msg' becomes the response message; rest is data.
        return make_response(1, res.pop("msg", "Success"), res)
    except Exception as e:
        return make_response(0, str(e))
@router_v1.post("/pending_urls")
async def pending_urls(req: PendingRequest):
    """Fetch up to `req.limit` pending URLs for a task."""
    try:
        res = crawler_sql_service.get_pending_urls(req.task_id, req.limit)
        # Even when the queue is empty the service supplies a msg
        # (e.g. "Queue is empty"), so we just pass it through.
        return make_response(1, res.pop("msg", "Success"), res)
    except Exception as e:
        return make_response(0, str(e))
@router_v1.post("/save_results")
async def save_results(req: SaveResultsRequest):
    """Persist crawl results for a task."""
    try:
        res = crawler_sql_service.save_results(req.task_id, req.results)
        # Service-provided 'msg' becomes the response message; rest is data.
        return make_response(1, res.pop("msg", "Success"), res)
    except Exception as e:
        return make_response(0, str(e))
@router_v1.post("/search")
async def search_v1(req: SearchRequest):
    """V1 search: the client supplies the embedding vector itself."""
    try:
        vector = req.query_embedding['vector']
        # NOTE(review): confirm the vector dimension matches the PGVector
        # column (TextEmbedding V3 may be 1024-dim, V2 is 1536-dim).
        if not vector:
            # Status 2 = bad/empty input, distinct from status 0 (error).
            return make_response(2, "Vector is empty", None)
        # Service returns {'results': [...], 'msg': 'Found ...'}
        res = crawler_sql_service.search_knowledge(
            query_embedding=vector,
            task_id=req.task_id,
            limit=req.limit
        )
        return make_response(1, res.pop("msg", "Search Done"), res)
    except Exception as e:
        return make_response(0, str(e))
# ==========================================
# V2 Router: automated workflow endpoints
# ==========================================
router_v2 = APIRouter()
@router_v2.post("/auto/map")
async def auto_map(req: AutoMapRequest):
    """
    [Sync] Take a homepage URL, call Firecrawl Map and ingest the URLs.

    Runs synchronously so the caller gets the task_id immediately;
    Firecrawl Map is fast enough. Move to BackgroundTasks if it ever
    becomes slow.
    """
    try:
        # Workflow returns: {'task_id': ..., 'msg': 'Task mapped...', ...}
        res = workflow.map_and_ingest(req.url)
        return make_response(1, res.pop("msg", "Mapping Started"), res)
    except Exception as e:
        return make_response(0, str(e))
async def auto_process(req: AutoProcessRequest, background_tasks: BackgroundTasks):
    """
    [Async] Trigger background work: consume queue -> crawl -> embed -> store.
    """
    try:
        # The long-running pipeline goes to a FastAPI background task.
        background_tasks.add_task(workflow.process_task_queue, req.task_id, req.batch_size)
        # The task runs after the response is sent, so the service's own
        # result msg is not available here — return a generic message.
        return make_response(1, "Background processing started", {"task_id": req.task_id})
    except Exception as e:
        return make_response(0, str(e))
@router_v2.post("/search")
async def search_v2(req: TextSearchRequest):
    """
    [Smart] Natural-language query -> server-side embedding -> vector search.
    """
    try:
        # Workflow returns {'results': [...], 'msg': '...'}
        res = workflow.search_with_embedding(req.query, req.task_id, req.limit)
        return make_response(1, res.pop("msg", "Search Success"), res)
    except Exception as e:
        return make_response(0, f"Search Failed: {str(e)}")