# wiki_crawler/backend/schemas/schemas.py
from pydantic import BaseModel
from typing import Optional, List, Any
class RegisterRequest(BaseModel):
    """Request body for registering a new crawl task."""

    # Root URL the crawl task starts from.
    url: str
class PendingRequest(BaseModel):
    """Request body for fetching pending URLs of an existing task."""

    # Identifier of the crawl task to query.
    task_id: int
    # Maximum number of pending entries to return (defaults to 10).
    limit: Optional[int] = 10
class AddUrlsRequest(BaseModel):
    """Request body for adding discovered URLs to a task's queue."""

    # Identifier of the crawl task the URLs belong to.
    task_id: int
    # NOTE(review): opaque dict — presumably maps URL -> metadata;
    # confirm the expected shape against the caller.
    urls_obj: dict
class CrawlResult(BaseModel):
    """A single crawled content chunk, optionally with its embedding."""

    # URL of the page this chunk was extracted from.
    source_url: str
    # Index of this chunk within the source page (newly added field).
    chunk_index: int
    # Page title, if one was extracted.
    title: Optional[str] = None
    # Text content of the chunk.
    content: Optional[str] = None
    # Embedding vector for the chunk; None if not yet computed.
    embedding: Optional[List[float]] = None
class SaveResultsRequest(BaseModel):
    """Request body for persisting a batch of crawl results for a task."""

    # Identifier of the crawl task the results belong to.
    task_id: int
    # Batch of crawled chunks to save.
    results: List[CrawlResult]
class SearchRequest(BaseModel):
    """Vector-search request body."""

    # If task_id is not provided, search across the whole database.
    task_id: Optional[int] = None
    # NOTE(review): declared as dict while CrawlResult.embedding is
    # List[float] — presumably a wrapper around the query vector;
    # confirm the expected shape against the caller before changing.
    query_embedding: dict
    # Maximum number of results to return (defaults to 5).
    limit: Optional[int] = 5
# ... (original schemas retained: RegisterRequest, AddUrlsRequest, etc.) ...
# === V2 New Schemas ===
class AutoMapRequest(BaseModel):
    """Request body for automatically mapping a site from a root URL."""

    # Root URL to start mapping from.
    url: str
class AutoProcessRequest(BaseModel):
    """Request body for automatically processing a task's queued URLs."""

    # Identifier of the crawl task to process.
    task_id: int
    # Number of URLs to process per batch (defaults to 5).
    batch_size: Optional[int] = 5
class TextSearchRequest(BaseModel):
    """Plain-text search request body.

    The caller sends raw text directly; no embedding vector is required.
    """

    # Free-text query supplied by the user.
    query: str
    # If task_id is not provided, search across the whole database.
    task_id: Optional[int] = None
    # Maximum number of results to return (defaults to 5).
    limit: Optional[int] = 5