编写未来计划表

This commit is contained in:
2026-01-13 17:42:19 +08:00
parent 36bc0cc08b
commit e1a94d4bc7
6 changed files with 71 additions and 11 deletions

View File

@@ -5,6 +5,8 @@ class Settings(BaseSettings):
系统配置类
自动读取环境变量或 .env 文件
"""
CANDIDATE_NUM: int = 10
DB_USER: str
DB_PASS: str
DB_HOST: str

View File

@@ -24,7 +24,7 @@ async def auto_process(req: AutoProcessRequest, bg_tasks: BackgroundTasks):
@router.post("/search")
async def search_smart(req: TextSearchRequest):
    """Text-search endpoint: accepts a plain-text query and returns ranked results.

    The request carries raw text (the service embeds it server-side — TODO confirm
    against CrawlerService.search) plus an optional task scope and result count.

    Returns:
        A uniform response envelope from make_response:
        status 1 with the result payload on success, status 0 with the
        error message on failure.
    """
    try:
        # NOTE(review): removed the stale duplicate call passing req.limit —
        # the schema renamed that field to return_num, and the first call's
        # result was immediately overwritten anyway (dead assignment).
        res = crawler_service.search(req.query, req.task_id, req.return_num)
        # Pop "msg" out of the payload so it becomes the envelope message.
        return make_response(1, res.pop("msg", "Success"), res)
    except Exception as e:
        # API boundary: convert any failure into an error envelope rather
        # than letting the framework return a bare 500.
        return make_response(0, str(e))

View File

@@ -13,7 +13,6 @@ class AddUrlsRequest(BaseModel):
task_id: int
urls_obj: dict
# schemas.py
class CrawlResult(BaseModel):
source_url: str
chunk_index: int # 新增字段
@@ -31,11 +30,6 @@ class SearchRequest(BaseModel):
query_embedding: dict
limit: Optional[int] = 5
# ... (保留原有的 Schema: RegisterRequest, AddUrlsRequest 等) ...
# === V2 New Schemas ===
class AutoMapRequest(BaseModel):
url: str
@@ -47,4 +41,4 @@ class AutoProcessRequest(BaseModel):
class TextSearchRequest(BaseModel):
    """Request body for the /search endpoint.

    Carries raw query text — the client no longer needs to supply an
    embedding vector; embedding is handled server-side.
    """
    # Raw user query text (no client-side vector required).
    query: str
    # Optional scope: restrict the search to a single crawl task when set.
    task_id: Optional[int] = None
    # Maximum number of results to return. NOTE(review): this field was
    # renamed from `limit`; the stale pre-rename duplicate field was removed.
    return_num: Optional[int] = 5

View File

@@ -152,7 +152,7 @@ class CrawlerService:
# 2. 计算粗排召回数量
# 逻辑:至少召回 50 个,如果用户要很多,则召回 10 倍
coarse_limit = return_num * 10 if return_num * 10 > 50 else 50
coarse_limit = return_num * 10 if return_num * 10 > settings.CANDIDATE_NUM else settings.CANDIDATE_NUM
# 3. 执行混合检索 (粗排)
coarse_results = data_service.search(