Add scheduled subset sync workflow
This commit is contained in:
103
scripts/build_singbox_rules.py
Normal file
103
scripts/build_singbox_rules.py
Normal file
@@ -0,0 +1,103 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
# Repository root: this script lives in scripts/, so go up two levels.
ROOT = Path(__file__).resolve().parent.parent
# Input: Surge-format .list rule files.
SURGE_DIR = ROOT / "rule" / "Surge"
# Output: generated sing-box rule-set JSON files.
SINGBOX_DIR = ROOT / "rule" / "sing-box"

# Create the output directory up front so the write_text calls below cannot fail
# on a missing path (idempotent thanks to exist_ok=True).
SINGBOX_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Surge lists to convert. Key = output file stem (rule/sing-box/<key>.json),
# value = source filename under rule/Surge.
UPSTREAM_TARGETS = {
    "Lan": "Lan.list",
    "Apple": "Apple.list",
    "OpenAI": "OpenAI.list",
    "Gemini": "Gemini.list",
    "Claude": "Claude.list",
    "China": "China.list",
    "ChinaIPs": "ChinaIPs.list",
    "Proxy": "Proxy.list",
}
|
||||
|
||||
# Surge rule type -> sing-box rule-set field name. Rule types not listed here
# are silently skipped by parse_surge_list.
FIELD_MAP = {
    "DOMAIN": "domain",
    "DOMAIN-SUFFIX": "domain_suffix",
    "DOMAIN-KEYWORD": "domain_keyword",
    "DOMAIN-REGEX": "domain_regex",
    "IP-CIDR": "ip_cidr",
    # sing-box uses a single ip_cidr field for both IPv4 and IPv6 CIDRs.
    "IP-CIDR6": "ip_cidr",
    "PROCESS-NAME": "process_name",
}
|
||||
|
||||
|
||||
def build_rule_set(entries: dict[str, list[str]]) -> dict:
    """Wrap collected rule values into a sing-box rule-set payload.

    Emits one ``{field: values}`` rule object per non-empty field, in a fixed
    field order, under the rule-set source format ``{"version": 3, "rules": [...]}``.

    Args:
        entries: Mapping of sing-box field name to its list of values.

    Returns:
        A dict ready to be serialized as a sing-box rule-set source file.
    """
    field_order = (
        "domain",
        "domain_suffix",
        "domain_keyword",
        "domain_regex",
        "ip_cidr",
        "process_name",
    )
    # Empty / missing fields are omitted entirely from the output.
    rules = [{field: entries[field]} for field in field_order if entries.get(field)]
    return {"version": 3, "rules": rules}
|
||||
|
||||
|
||||
def parse_surge_list(path: Path) -> dict:
    """Parse a Surge ``.list`` file into a sing-box rule-set payload.

    Each useful line has the shape ``TYPE,value[,flags...]``; blank lines and
    ``#`` comments are ignored, as are rule types missing from FIELD_MAP and
    lines with an empty value. Values are deduplicated per field while keeping
    first-seen order.

    Args:
        path: Path to the Surge-format list file (read as UTF-8).

    Returns:
        The dict produced by build_rule_set for the collected entries.
    """
    entries: dict[str, list[str]] = {}
    for raw in path.read_text(encoding="utf-8").splitlines():
        line = raw.strip()
        if not line or line.startswith("#"):
            continue  # blank line or comment
        parts = [part.strip() for part in line.split(",")]
        if len(parts) < 2:
            continue  # not a TYPE,value rule line
        field = FIELD_MAP.get(parts[0].upper())
        value = parts[1]
        if not field or not value:
            continue  # unsupported rule type, or empty value
        entries.setdefault(field, []).append(value)
    # Deduplicate in O(n) instead of a per-item `in` scan, preserving
    # first-occurrence order (dicts keep insertion order).
    deduped = {field: list(dict.fromkeys(values)) for field, values in entries.items()}
    return build_rule_set(deduped)
|
||||
|
||||
|
||||
# Hand-maintained rule sets written out verbatim (not derived from any Surge
# list). Key = output file stem (rule/sing-box/<key>.json).
MANUAL_RULESETS = {
    "ManualBackHome": {"version": 3, "rules": [{"ip_cidr": ["192.168.10.0/24"]}]},
    "ManualReject": {"version": 3, "rules": [{"domain": ["www.axure.com"]}]},
    "ManualAI": {"version": 3, "rules": [{"domain_keyword": ["macked"]}]},
    "ManualDirect": {
        "version": 3,
        "rules": [
            {
                "domain_suffix": [
                    "umeng.com",
                    "umsns.com",
                    "umindex.com",
                    "nice.com",
                    "apple.com",
                    "alicdn.com",
                    "qujiangkeji.com",
                    "banxueketang.com",
                    "doubj.cn",
                    "local",
                ]
            }
        ],
    },
}
|
||||
|
||||
|
||||
def _write_ruleset(name: str, payload: dict) -> None:
    """Serialize one rule-set payload to rule/sing-box/<name>.json (UTF-8, trailing newline)."""
    text = json.dumps(payload, ensure_ascii=False, indent=2) + "\n"
    (SINGBOX_DIR / f"{name}.json").write_text(text, encoding="utf-8")


# Convert every upstream Surge list first, then emit the manual rule sets.
for name, filename in UPSTREAM_TARGETS.items():
    _write_ruleset(name, parse_surge_list(SURGE_DIR / filename))

for name, payload in MANUAL_RULESETS.items():
    _write_ruleset(name, payload)
|
||||
Reference in New Issue
Block a user