Files
wiki_crawler/backend/schemas.py

31 lines
741 B
Python
Raw Normal View History

2025-12-20 17:08:54 +08:00
from pydantic import BaseModel
from typing import List, Optional
class RegisterRequest(BaseModel):
    """Request body for registering a new crawl task for a URL."""

    url: str  # the starting/root URL to register — presumably a wiki page; confirm against the endpoint handler
class PendingRequest(BaseModel):
    """Request body for fetching pending (not-yet-crawled) URLs of a task."""

    task_id: int  # identifier of the crawl task to poll
    limit: Optional[int] = 10  # max number of pending URLs to return; defaults to 10
class AddUrlsRequest(BaseModel):
    """Request body for appending newly discovered URLs to a crawl task."""

    task_id: int  # identifier of the crawl task the URLs belong to
    urls: List[str]  # URLs to enqueue for crawling
# 2025-12-22 22:08:51 +08:00
# schemas.py
# 2025-12-20 17:08:54 +08:00
class CrawlResult(BaseModel):
    """A single crawled content chunk extracted from a source page."""

    source_url: str  # URL of the page this chunk came from
    chunk_index: int  # position of this chunk within the source page (newly added field)
    title: Optional[str] = None  # page title, when available
    content: Optional[str] = None  # extracted text of this chunk
    embedding: Optional[List[float]] = None  # vector embedding of the chunk, if already computed
class SaveResultsRequest(BaseModel):
    """Request body for persisting a batch of crawl results for a task."""

    task_id: int  # identifier of the crawl task the results belong to
    results: List[CrawlResult]  # crawled chunks to store
class SearchRequest(BaseModel):
    """Request body for a vector-similarity search over stored chunks."""

    # If task_id is not provided, search across the entire database.
    task_id: Optional[int] = None  # restrict the search to one task, or None for a global search
    query_embedding: List[float]  # embedding vector of the query text
    limit: Optional[int] = 5  # max number of matches to return; defaults to 5