# Wiki Crawler API — FastAPI route definitions.
import logging

from fastapi import FastAPI

from .schemas import RegisterRequest, PendingRequest, SaveResultsRequest, AddUrlsRequest
from .service import crawler_service
from .utils import make_response
|
|
|
|
# Application instance; the route decorators below register handlers on it.
app = FastAPI(title="Wiki Crawler API")
|
|
|
|
@app.post("/register")
async def register(req: RegisterRequest):
    """Register a new crawl task rooted at ``req.url``.

    Returns the standard response envelope: status 1 with the task data
    from ``crawler_service.register_task`` on success, status 0 with the
    error message on failure.
    """
    try:
        data = crawler_service.register_task(req.url)
        return make_response(1, "Success", data)
    except Exception as e:
        # The error is reported to the client as a string, but also log the
        # full traceback server-side so unexpected failures aren't lost.
        logging.getLogger(__name__).exception("register failed for url %s", req.url)
        return make_response(0, str(e))
|
|
|
|
@app.post("/add_urls")
async def add_urls(req: AddUrlsRequest):
    """Queue additional URLs onto an existing crawl task.

    Returns the standard response envelope: status 1 with the data from
    ``crawler_service.add_urls`` on success, status 0 with the error
    message on failure.
    """
    try:
        data = crawler_service.add_urls(req.task_id, req.urls)
        return make_response(1, "Success", data)
    except Exception as e:
        # Keep the client-facing error string, but log the traceback
        # server-side so unexpected failures aren't silently swallowed.
        logging.getLogger(__name__).exception("add_urls failed for task %s", req.task_id)
        return make_response(0, str(e))
|
|
|
|
@app.post("/pending_urls")
async def pending_urls(req: PendingRequest):
    """Fetch up to ``req.limit`` pending URLs for task ``req.task_id``.

    Returns the standard response envelope; the message distinguishes a
    drained queue ("Queue Empty") from a normal batch ("Success") based
    on whether ``data["urls"]`` is non-empty.
    """
    try:
        data = crawler_service.get_pending_urls(req.task_id, req.limit)
        msg = "Success" if data["urls"] else "Queue Empty"
        return make_response(1, msg, data)
    except Exception as e:
        # Report the failure in the envelope, and log the traceback
        # server-side so unexpected errors aren't reduced to a bare string.
        logging.getLogger(__name__).exception("pending_urls failed for task %s", req.task_id)
        return make_response(0, str(e))
|
|
|
|
@app.post("/save_results")
async def save_results(req: SaveResultsRequest):
    """Persist crawl results for task ``req.task_id``.

    Returns the standard response envelope: status 1 with the data from
    ``crawler_service.save_results`` on success, status 0 with the error
    message on failure.
    """
    try:
        data = crawler_service.save_results(req.task_id, req.results)
        return make_response(1, "Success", data)
    except Exception as e:
        # Preserve the client-facing error string, but log the traceback
        # server-side so unexpected failures remain diagnosable.
        logging.getLogger(__name__).exception("save_results failed for task %s", req.task_id)
        return make_response(0, str(e))