wiki_crawler/backend/utils.py

from typing import Any
from urllib.parse import urlparse, urlunparse

from sqlalchemy import create_engine, MetaData, Table, select, update, and_


def normalize_url(url: str) -> str:
    """Normalize a URL: lowercase the scheme and host, strip trailing slashes, drop the fragment."""
    if not url:
        return ""
    url = url.strip()
    parsed = urlparse(url)
    scheme = parsed.scheme.lower()
    netloc = parsed.netloc.lower()
    path = parsed.path.rstrip('/')
    # Rebuild without the fragment so equivalent links compare equal; query string is preserved.
    return urlunparse((scheme, netloc, path, parsed.params, parsed.query, ""))


def make_response(code: int, msg: str, data: Any = None):
    """Build a uniform response envelope: {"code": ..., "msg": ..., "data": ...}."""
    return {"code": code, "msg": msg, "data": data}