"""V2 automated crawler API routes (wiki_crawler/backend/routers/v2.py)."""
from fastapi import APIRouter, BackgroundTasks
from backend.services.crawler_service import crawler_service
from backend.utils.common import make_response
from backend.schemas.schemas import AutoMapRequest, AutoProcessRequest, TextSearchRequest
# Router for the v2 "automated" API surface; every route below is served under /api/v2.
router = APIRouter(prefix="/api/v2", tags=["V2 Automated"])
@router.post("/crawler/map")
async def auto_map(req: AutoMapRequest):
    """Start mapping the site at ``req.url``.

    Delegates to ``crawler_service.map_site`` and wraps the outcome in the
    project's response envelope: code 1 with the service payload on success,
    code 0 with the exception text on failure.
    """
    try:
        result = crawler_service.map_site(req.url)
        # pop() lifts "msg" out of the payload so it is not duplicated
        # in both the message field and the data dict.
        return make_response(1, result.pop("msg", "Started"), result)
    except Exception as exc:  # boundary handler: report, never propagate
        return make_response(0, str(exc))
@router.post("/crawler/process")
async def auto_process(req: AutoProcessRequest, bg_tasks: BackgroundTasks):
    """Schedule background processing of the crawl queue for ``req.task_id``.

    The work itself (``crawler_service.process_queue``) runs after the
    response is sent, via FastAPI's BackgroundTasks; the response only
    acknowledges that scheduling succeeded.
    """
    try:
        bg_tasks.add_task(
            crawler_service.process_queue, req.task_id, req.batch_size
        )
        payload = {"task_id": req.task_id}
        return make_response(1, "Background processing started", payload)
    except Exception as exc:  # boundary handler: report, never propagate
        return make_response(0, str(exc))
@router.post("/search")
async def search_smart(req: TextSearchRequest):
    """Run a text search scoped to ``req.task_id``, capped at ``req.limit`` hits.

    Returns the standard envelope: code 1 with the search payload on
    success, code 0 with the exception text on failure.
    """
    try:
        hits = crawler_service.search(req.query, req.task_id, req.limit)
        # pop() lifts "msg" out of the payload so it is not duplicated
        # in both the message field and the data dict.
        return make_response(1, hits.pop("msg", "Success"), hits)
    except Exception as exc:  # boundary handler: report, never propagate
        return make_response(0, str(exc))