362 lines
12 KiB
Python
362 lines
12 KiB
Python
#!/usr/bin/env python3
|
|
"""Compose-facing web server for MobileModels static pages and maintenance APIs."""
|
|
|
|
from __future__ import annotations
|
|
|
|
import argparse
|
|
import json
|
|
import os
|
|
import re
|
|
import subprocess
|
|
import threading
|
|
from datetime import datetime
|
|
from http import HTTPStatus
|
|
from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer
|
|
from pathlib import Path
|
|
|
|
from project_layout import PROJECT_ROOT, WORKSPACE_ROOT
|
|
from sync_upstream_mobilemodels import DEFAULT_BRANCH, DEFAULT_REPO_URL
|
|
|
|
|
|
SYNC_SCRIPT = PROJECT_ROOT / "tools/sync_upstream_mobilemodels.py"
|
|
INDEX_PATH = PROJECT_ROOT / "dist/device_index.json"
|
|
MYSQL_SEED_PATH = PROJECT_ROOT / "dist/mobilemodels_mysql_seed.sql"
|
|
MYSQL_LOADER = PROJECT_ROOT / "tools/load_mysql_seed.py"
|
|
DATA_ROOT = Path(os.environ.get("MOBILEMODELS_DATA_ROOT", "/data"))
|
|
SYNC_METADATA_PATH = DATA_ROOT / "state/sync_status.json"
|
|
SYNC_LOCK = threading.Lock()
|
|
NORMALIZE_RE = re.compile(r"[^0-9a-z\u4e00-\u9fff]+")
|
|
|
|
|
|
def mysql_auto_load_enabled() -> bool:
    """Return True when the MYSQL_AUTO_LOAD env var is set to a truthy flag."""
    flag = os.environ.get("MYSQL_AUTO_LOAD", "0")
    return flag.strip().lower() in ("1", "true", "yes", "on")
|
|
|
|
|
|
def run_command(args: list[str]) -> subprocess.CompletedProcess[str]:
    """Run *args* from the project root, capturing text output; never raises
    on a non-zero exit (callers inspect ``returncode`` themselves)."""
    return subprocess.run(
        args, cwd=PROJECT_ROOT, text=True, capture_output=True, check=False
    )
|
|
|
|
|
|
def normalize_text(text: str) -> str:
    """Lower-case *text* and drop every char outside [0-9a-z] and CJK ideographs."""
    lowered = (text or "").lower()
    return NORMALIZE_RE.sub("", lowered)
|
|
|
|
|
|
def sql_string(value: str) -> str:
    """Escape *value* for embedding inside a single-quoted MySQL string literal.

    Backslashes are doubled first, then single quotes are doubled, matching
    MySQL's default (backslash-escaping) string literal rules.
    """
    escaped = value if value else ""
    escaped = escaped.replace("\\", "\\\\")
    return escaped.replace("'", "''")
|
|
|
|
|
|
def mysql_command(database: str | None = None) -> list[str]:
|
|
command = [
|
|
"mysql",
|
|
f"--host={os.environ.get('MYSQL_HOST', 'mysql')}",
|
|
f"--port={os.environ.get('MYSQL_PORT', '3306')}",
|
|
f"--user={os.environ.get('MYSQL_READER_USER', '')}",
|
|
"--protocol=TCP",
|
|
"--default-character-set=utf8mb4",
|
|
"--batch",
|
|
"--raw",
|
|
]
|
|
if database:
|
|
command.append(database)
|
|
return command
|
|
|
|
|
|
def mysql_env() -> dict[str, str]:
    """Return a copy of the environment with MYSQL_PWD set for the reader user,
    so the password never appears on the mysql command line."""
    merged = dict(os.environ)
    merged["MYSQL_PWD"] = os.environ.get("MYSQL_READER_PASSWORD", "")
    return merged
|
|
|
|
|
|
def run_mysql_query(sql: str, database: str | None = None) -> list[dict[str, str | None]]:
    """Execute *sql* through the mysql CLI and parse its tab-separated output.

    Returns one dict per result row keyed by the header line; a literal
    "NULL" cell is mapped to None, and missing trailing cells become "".

    Raises:
        RuntimeError: when mysql exits non-zero (message from stderr/stdout).
    """
    proc = subprocess.run(
        mysql_command(database=database),
        env=mysql_env(),
        input=sql,
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    if proc.returncode != 0:
        detail = proc.stderr.strip() or proc.stdout.strip()
        raise RuntimeError(detail or f"mysql exited with {proc.returncode}")

    # --batch output: header line, then one tab-separated line per row.
    lines = [ln for ln in proc.stdout.splitlines() if ln.strip()]
    if not lines:
        return []

    headers = lines[0].split("\t")
    results: list[dict[str, str | None]] = []
    for raw in lines[1:]:
        cells = raw.split("\t")
        record: dict[str, str | None] = {}
        for pos, name in enumerate(headers):
            cell = cells[pos] if pos < len(cells) else ""
            record[name] = None if cell == "NULL" else cell
        results.append(record)
    return results
|
|
|
|
|
|
def build_sql_query_payload(payload: dict[str, object]) -> dict[str, object]:
    """Resolve a device-lookup request into rows from mm_device_catalog.

    Expects ``model_raw`` (or ``model``) plus an optional ``limit`` (clamped
    to 1..100, default 20). Returns the executed SQL, matched rows, and the
    normalized identifier.

    Raises:
        RuntimeError: on missing/un-normalizable input or a non-numeric limit
            (messages are user-facing).
    """
    raw_value = str(payload.get("model_raw") or payload.get("model") or "").strip()
    if not raw_value:
        raise RuntimeError("请填写设备标识。")

    alias_norm = normalize_text(raw_value)
    if not alias_norm:
        raise RuntimeError("设备标识无法归一化,请检查输入。")

    try:
        limit = int(payload.get("limit", 20))
    except Exception as err:
        raise RuntimeError("limit 必须是数字。") from err
    # Clamp so a client cannot request unbounded output.
    limit = min(max(limit, 1), 100)

    # alias_norm is already restricted to [0-9a-z] + CJK, but escape anyway.
    sql = f"""
SELECT
    model,
    record_id,
    alias_norm,
    device_name,
    brand,
    manufacturer_brand,
    parent_brand,
    market_brand,
    device_type,
    source_file,
    section,
    source_rank,
    source_weight,
    code,
    code_alias,
    ver_name
FROM mobilemodels.mm_device_catalog
WHERE alias_norm = '{sql_string(alias_norm)}'
ORDER BY source_rank ASC, record_id ASC
LIMIT {limit};
""".strip()

    rows = run_mysql_query(sql)
    return {
        "query_mode": "sql",
        "model_raw": raw_value,
        "alias_norm": alias_norm,
        "limit": limit,
        "sql": sql,
        "rows": rows,
        "row_count": len(rows),
    }
|
|
|
|
|
|
def read_sync_metadata() -> dict[str, object]:
    """Load the persisted sync-status JSON, returning {} when it is missing
    or unreadable (best-effort: a corrupt file must not break the API)."""
    if not SYNC_METADATA_PATH.exists():
        return {}
    try:
        text = SYNC_METADATA_PATH.read_text(encoding="utf-8")
        return json.loads(text)
    except Exception:
        return {}
|
|
|
|
|
|
def write_sync_metadata(payload: dict[str, object]) -> None:
    """Persist *payload* as pretty-printed UTF-8 JSON, creating parent dirs."""
    target = SYNC_METADATA_PATH
    target.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(payload, ensure_ascii=False, indent=2)
    target.write_text(serialized, encoding="utf-8")
|
|
|
|
|
|
def get_status_payload() -> dict[str, object]:
    """Assemble the JSON payload served by GET /api/status.

    Combines artifact mtimes (device index, MySQL seed), MySQL connection
    settings read from the environment, an optional MySQL readiness probe
    (only when auto-load is enabled), and the persisted sync metadata.
    """
    index_mtime = None
    mysql_seed_mtime = None
    if INDEX_PATH.exists():
        index_mtime = datetime.fromtimestamp(INDEX_PATH.stat().st_mtime).isoformat(timespec="seconds")
    if MYSQL_SEED_PATH.exists():
        mysql_seed_mtime = datetime.fromtimestamp(MYSQL_SEED_PATH.stat().st_mtime).isoformat(timespec="seconds")

    mysql_host = os.environ.get("MYSQL_HOST", "mysql")
    mysql_port = os.environ.get("MYSQL_PORT", "3306")
    mysql_database = os.environ.get("MYSQL_DATABASE", "mobilemodels")
    mysql_reader_user = os.environ.get("MYSQL_READER_USER", "")
    mysql_reader_password = os.environ.get("MYSQL_READER_PASSWORD", "")
    mysql_auto_load = mysql_auto_load_enabled()
    mysql_ready = False
    mysql_status = ""
    sync_metadata = read_sync_metadata()
    if mysql_auto_load:
        # Readiness probe only: --check-only with a short 5s wait budget.
        mysql_proc = run_command(["python3", str(MYSQL_LOADER), "--check-only", "--wait-timeout", "5"])
        if mysql_proc.returncode == 0:
            mysql_ready = True
            mysql_status = mysql_proc.stdout.strip() or "MySQL ready"
        else:
            mysql_status = mysql_proc.stderr.strip() or mysql_proc.stdout.strip() or "MySQL unavailable"
    else:
        mysql_status = "MySQL auto load disabled"

    return {
        "supports_upstream_sync": True,
        "storage_mode": "docker_volume",
        "project_root": str(PROJECT_ROOT),
        "workspace_root": str(WORKSPACE_ROOT),
        "data_root": str(DATA_ROOT),
        "mysql_auto_load": mysql_auto_load,
        "upstream_repo_url": DEFAULT_REPO_URL,
        "upstream_branch": DEFAULT_BRANCH,
        # May be None when no sync has been recorded yet.
        "last_sync_time": sync_metadata.get("last_sync_time"),
        "last_upstream_commit": sync_metadata.get("last_upstream_commit"),
        "index_file": str(INDEX_PATH.relative_to(PROJECT_ROOT)),
        "index_mtime": index_mtime,
        "mysql_seed_file": str(MYSQL_SEED_PATH.relative_to(PROJECT_ROOT)),
        "mysql_seed_mtime": mysql_seed_mtime,
        "mysql_host": mysql_host,
        "mysql_port": mysql_port,
        "mysql_database": mysql_database,
        "mysql_reader_user": mysql_reader_user,
        # NOTE(review): this returns the reader password over the /api/status
        # HTTP endpoint — confirm the endpoint is only reachable by trusted
        # clients, or consider redacting this field.
        "mysql_reader_password": mysql_reader_password,
        "mysql_ready": mysql_ready,
        "mysql_status": mysql_status,
    }
|
|
|
|
|
|
def run_upstream_sync() -> dict[str, object]:
    """Run the upstream sync script once and return a status payload.

    Serialized via SYNC_LOCK: a concurrent request fails fast with a
    RuntimeError instead of queueing behind the running sync. On success,
    a small subset of the payload is persisted via write_sync_metadata so
    /api/status can report the last sync across restarts.

    Raises:
        RuntimeError: when a sync is already in progress, or the script
            exits non-zero (message carries the combined script output).
    """
    # Non-blocking acquire: reject overlapping syncs rather than serializing.
    if not SYNC_LOCK.acquire(blocking=False):
        raise RuntimeError("已有同步任务在执行,请稍后再试。")

    try:
        # Best-effort lookup of the upstream branch head; failures leave "".
        upstream_proc = run_command(
            ["git", "ls-remote", DEFAULT_REPO_URL, f"refs/heads/{DEFAULT_BRANCH}"]
        )
        upstream_commit = ""
        if upstream_proc.returncode == 0 and upstream_proc.stdout.strip():
            upstream_commit = upstream_proc.stdout.split()[0]

        command = [
            "python3",
            str(SYNC_SCRIPT),
            "--build-index",
            "--export-mysql-seed",
        ]
        if mysql_auto_load_enabled():
            command.append("--load-mysql")
        proc = run_command(command)
        # Merge stdout and stderr so callers see the full script transcript.
        output = "\n".join(
            part for part in [proc.stdout.strip(), proc.stderr.strip()] if part
        ).strip()

        if proc.returncode != 0:
            raise RuntimeError(output or f"sync script failed with exit code {proc.returncode}")

        payload = {
            "storage_mode": "docker_volume",
            "project_root": str(PROJECT_ROOT),
            "workspace_root": str(WORKSPACE_ROOT),
            "data_root": str(DATA_ROOT),
            "upstream_repo_url": DEFAULT_REPO_URL,
            "upstream_branch": DEFAULT_BRANCH,
            "upstream_commit": upstream_commit,
            "last_sync_time": datetime.now().isoformat(timespec="seconds"),
            "last_upstream_commit": upstream_commit,
            "index_file": str(INDEX_PATH.relative_to(PROJECT_ROOT)),
            # mtimes are re-read here because the sync script just rebuilt them.
            "index_mtime": datetime.fromtimestamp(INDEX_PATH.stat().st_mtime).isoformat(timespec="seconds")
            if INDEX_PATH.exists()
            else None,
            "mysql_seed_file": str(MYSQL_SEED_PATH.relative_to(PROJECT_ROOT)),
            "mysql_seed_mtime": datetime.fromtimestamp(MYSQL_SEED_PATH.stat().st_mtime).isoformat(timespec="seconds")
            if MYSQL_SEED_PATH.exists()
            else None,
            "output": output or "同步脚本执行完成。",
        }
        # Persist only the fields /api/status needs across restarts.
        write_sync_metadata({
            "last_sync_time": payload["last_sync_time"],
            "last_upstream_commit": payload["last_upstream_commit"],
            "upstream_repo_url": DEFAULT_REPO_URL,
            "upstream_branch": DEFAULT_BRANCH,
        })
        return payload
    finally:
        SYNC_LOCK.release()
|
|
|
|
|
|
class MobileModelsHandler(SimpleHTTPRequestHandler):
    """Serve static files from PROJECT_ROOT plus the JSON maintenance API.

    Endpoints:
        GET  /api/status        -> get_status_payload()
        POST /api/sync-upstream -> run_upstream_sync()
        POST /api/query-sql     -> build_sql_query_payload()
    All other paths fall through to SimpleHTTPRequestHandler's static serving.
    """

    def __init__(self, *args, **kwargs):
        # Anchor static serving at the project root instead of the process CWD.
        super().__init__(*args, directory=str(PROJECT_ROOT), **kwargs)

    def guess_type(self, path: str) -> str:
        """Return the MIME type for *path*, forcing UTF-8 on textual types."""
        content_type = super().guess_type(path)
        lower_path = path.lower()
        if lower_path.endswith(".md"):
            return "text/markdown; charset=utf-8"
        if lower_path.endswith(".txt"):
            return "text/plain; charset=utf-8"
        # Any other text/* type gets an explicit UTF-8 charset if it lacks one.
        if content_type.startswith("text/") and "charset=" not in content_type:
            return f"{content_type}; charset=utf-8"
        return content_type

    def _send_json(self, payload: dict[str, object], status: int = HTTPStatus.OK) -> None:
        """Serialize *payload* as UTF-8 JSON and write a complete response."""
        data = json.dumps(payload, ensure_ascii=False).encode("utf-8")
        self.send_response(status)
        self.send_header("Content-Type", "application/json; charset=utf-8")
        self.send_header("Content-Length", str(len(data)))
        # API responses reflect live state; never cache them.
        self.send_header("Cache-Control", "no-store")
        self.end_headers()
        self.wfile.write(data)

    def do_GET(self) -> None:
        """Handle GET /api/status; everything else is served as a static file."""
        if self.path == "/api/status":
            try:
                self._send_json(get_status_payload())
            except Exception as err:
                self._send_json({"error": str(err)}, status=HTTPStatus.INTERNAL_SERVER_ERROR)
            return
        return super().do_GET()

    def do_POST(self) -> None:
        """Handle the two POST API endpoints; anything else gets a JSON 404."""
        if self.path == "/api/sync-upstream":
            try:
                payload = run_upstream_sync()
                self._send_json(payload)
            except RuntimeError as err:
                # 409 when another sync holds the lock; 500 for script failures.
                status = HTTPStatus.CONFLICT if "已有同步任务" in str(err) else HTTPStatus.INTERNAL_SERVER_ERROR
                self._send_json({"error": str(err)}, status=status)
            except Exception as err:
                self._send_json({"error": str(err)}, status=HTTPStatus.INTERNAL_SERVER_ERROR)
            return
        if self.path == "/api/query-sql":
            try:
                # NOTE(review): Content-Length is trusted as-is, so a huge
                # value means an unbounded read — consider capping body size.
                content_length = int(self.headers.get("Content-Length", "0") or "0")
                raw_body = self.rfile.read(content_length) if content_length > 0 else b"{}"
                req = json.loads(raw_body.decode("utf-8") or "{}")
                # Non-dict JSON bodies are treated as an empty request.
                payload = build_sql_query_payload(req if isinstance(req, dict) else {})
                self._send_json(payload)
            except RuntimeError as err:
                # Validation failures surface as 400 with a user-facing message.
                self._send_json({"error": str(err)}, status=HTTPStatus.BAD_REQUEST)
            except Exception as err:
                self._send_json({"error": str(err)}, status=HTTPStatus.INTERNAL_SERVER_ERROR)
            return

        self._send_json({"error": "Not found"}, status=HTTPStatus.NOT_FOUND)
|
|
|
|
|
|
def parse_args() -> argparse.Namespace:
    """Parse the command-line options (bind host and port)."""
    parser = argparse.ArgumentParser(
        description="Run the MobileModels web server inside Docker Compose."
    )
    parser.add_argument("--host", default="127.0.0.1", help="Bind host")
    parser.add_argument("--port", default=8123, type=int, help="Bind port")
    return parser.parse_args()
|
|
|
|
|
|
def main() -> int:
    """Start the HTTP server and block until interrupted.

    Returns:
        0 after a clean shutdown (Ctrl-C / SIGINT).
    """
    args = parse_args()
    server = ThreadingHTTPServer((args.host, args.port), MobileModelsHandler)
    print(f"Serving MobileModels on http://{args.host}:{args.port}")
    # Original code placed `return 0` directly after serve_forever(), which
    # never returns normally — Ctrl-C escaped as a KeyboardInterrupt traceback
    # and the listening socket was never closed. Handle shutdown explicitly.
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        pass  # graceful stop on Ctrl-C / SIGINT
    finally:
        server.server_close()  # release the listening socket
    return 0
|
|
|
|
|
|
if __name__ == "__main__":
|
|
raise SystemExit(main())
|