This commit is contained in:
riglen
2026-03-31 15:51:18 +08:00
parent e3cc26d4f6
commit 0d49398e2d
21 changed files with 1483 additions and 0 deletions

1
app/__init__.py Normal file
View File

@@ -0,0 +1 @@
__all__ = []

41
app/config.py Normal file
View File

@@ -0,0 +1,41 @@
from __future__ import annotations
from functools import lru_cache
from pathlib import Path
from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict
# Repository layout anchors: ROOT_DIR is two levels above this module
# (app/config.py -> project root).
ROOT_DIR = Path(__file__).resolve().parent.parent
CONFIG_DIR = ROOT_DIR / "config"  # holds sources.yaml and the rules/ directory
DATA_DIR = ROOT_DIR / "data"  # NOTE(review): unused in this module — presumably runtime data; confirm
class Settings(BaseSettings):
    """Process-wide configuration, loadable from the environment / .env file.

    Backed by pydantic-settings: every field can be overridden by an
    environment variable; unknown env keys are ignored (extra="ignore").
    """

    app_name: str = "sub-provider"
    app_env: str = "prod"
    host: str = "0.0.0.0"
    port: int = 18080
    log_level: str = "info"
    # Secret path segment that prefixes all subscription routes; the default is
    # a placeholder and should be replaced with a random value in deployment.
    public_path: str = Field(default="change-me-random-hash-path")
    # Externally visible base URL; when None it is derived from each request.
    public_base_url: str | None = None
    request_timeout_seconds: float = 20.0
    cache_ttl_seconds: int = 900  # default TTL for upstream fetch/provider caches
    max_proxy_name_length: int = 80  # proxy names longer than this are truncated
    default_user_agent: str = "sub-provider/0.2"
    sources_file: Path = CONFIG_DIR / "sources.yaml"
    rules_dir: Path = CONFIG_DIR / "rules"

    model_config = SettingsConfigDict(
        env_file=ROOT_DIR / ".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )
@lru_cache(maxsize=1)
def get_settings() -> Settings:
    """Return the singleton Settings instance (constructed once, then cached)."""
    return Settings()

167
app/main.py Normal file
View File

@@ -0,0 +1,167 @@
from __future__ import annotations
from fastapi import FastAPI, HTTPException, Query, Request
from fastapi.responses import Response
from app.config import get_settings
from app.models import RuleConfig, SourceConfig
from app.services.loader import load_app_config
from app.services.profiles import build_bundle_profile, build_thin_profile, dump_yaml
from app.services.rules import load_rule_text
from app.services.subscriptions import (
build_merged_provider_document,
build_provider_document,
build_source_snapshots,
dump_provider_yaml,
get_first_quota,
)
# Module-level wiring: settings and the source configuration are loaded once at
# import time, so configuration changes require a process restart.
settings = get_settings()
app = FastAPI(title=settings.app_name)
app_config = load_app_config(settings.sources_file)
# All subscription routes live under a secret path segment so they are not
# guessable; sources.yaml's public_path (if set) overrides the env default.
PUBLIC_PREFIX = "/" + (app_config.public_path or settings.public_path).strip("/")
@app.get("/healthz")
async def healthz() -> dict[str, str]:
    """Liveness probe: always reports the service as up."""
    return dict(status="ok")
def _base_url(request: Request) -> str:
    """Prefer the explicitly configured public base URL, else derive it from the request."""
    configured = settings.public_base_url
    base = configured if configured else str(request.base_url)
    return base.rstrip("/")
def _resolve_sources(sources: str | None) -> list[tuple[str, SourceConfig]]:
    """Resolve the optional comma-separated ``sources`` query into (name, config) pairs.

    Without a filter, every enabled source is returned in config order. With a
    filter, each requested name must exist and be enabled (404 otherwise),
    duplicates collapse, and an effectively empty selection is a 400.
    """
    if not sources:
        return [(name, cfg) for name, cfg in app_config.sources.items() if cfg.enabled]
    requested = [token.strip() for token in sources.split(",") if token.strip()]
    picked: list[tuple[str, SourceConfig]] = []
    used: set[str] = set()
    for name in requested:
        if name in used:
            continue
        cfg = app_config.sources.get(name)
        if cfg is None or not cfg.enabled:
            raise HTTPException(status_code=404, detail=f"source not found or disabled: {name}")
        picked.append((name, cfg))
        used.add(name)
    if not picked:
        raise HTTPException(status_code=400, detail="no sources selected")
    return picked
def _rule_path(rule: RuleConfig):
    """Resolve a rule's file inside the configured rules directory.

    Responds 404 both for missing files and for paths that escape the rules
    directory (e.g. via ``..`` components in the configured filename).
    """
    resolved = (settings.rules_dir / rule.file).resolve()
    inside_rules_dir = settings.rules_dir.resolve() in resolved.parents
    if not resolved.is_file() or not inside_rules_dir:
        raise HTTPException(status_code=404, detail="rule file missing")
    return resolved
async def _build_quota_headers(source_items: list[tuple[str, SourceConfig]]) -> dict[str, str]:
    """Expose the first selected source's quota as a Subscription-Userinfo header, if present."""
    quota = await get_first_quota(source_items)
    if quota is None or quota.is_empty():
        return {}
    return {"Subscription-Userinfo": quota.to_header_value()}
def _yaml_response(content: str, request: Request, headers: dict[str, str] | None = None, filename: str | None = None) -> Response:
    """Build a YAML HTTP response with standard headers.

    Always sets a YAML content type and disables caching; merges any extra
    *headers* on top. When *filename* is given, a Content-Disposition header
    advertises it using the RFC 5987 ``filename*`` form so non-ASCII names
    survive. HEAD requests get an empty body but identical headers.
    """
    from urllib.parse import quote

    final_headers = {
        "Content-Type": "text/yaml; charset=utf-8",
        "Cache-Control": "no-store",
    }
    if headers:
        final_headers.update(headers)
    if filename:
        # BUG FIX: the disposition previously contained the literal text
        # "(unknown)" and ignored the filename parameter entirely. Percent-encode
        # the real name per RFC 5987/6266.
        final_headers["Content-Disposition"] = f"attachment; filename*=UTF-8''{quote(filename)}"
    body = "" if request.method == "HEAD" else content
    return Response(content=body, media_type="text/yaml; charset=utf-8", headers=final_headers)
@app.api_route(PUBLIC_PREFIX + "/providers/merged.yaml", methods=["GET", "HEAD"])
async def merged_provider(request: Request, sources: str | None = Query(default=None)) -> Response:
    """Serve all (or the selected) sources merged into a single provider document."""
    selected = _resolve_sources(sources)
    try:
        document = await build_merged_provider_document(selected)
    except Exception as exc:  # noqa: BLE001
        raise HTTPException(status_code=502, detail=f"failed to build merged provider: {exc}") from exc
    quota_headers = await _build_quota_headers(selected)
    return _yaml_response(dump_provider_yaml(document), request, headers=quota_headers, filename="merged.yaml")
@app.api_route(PUBLIC_PREFIX + "/providers/{name}.yaml", methods=["GET", "HEAD"])
async def provider(name: str, request: Request) -> Response:
    """Serve one enabled source as a standalone provider document."""
    source = app_config.sources.get(name)
    if source is None or not source.enabled:
        raise HTTPException(status_code=404, detail="provider not found")
    try:
        document = await build_provider_document(name, source)
    except Exception as exc:  # noqa: BLE001
        raise HTTPException(status_code=502, detail=f"failed to build provider: {exc}") from exc
    quota_headers = await _build_quota_headers([(name, source)])
    return _yaml_response(dump_provider_yaml(document), request, headers=quota_headers, filename=f"{name}.yaml")
@app.api_route(PUBLIC_PREFIX + "/rules/{name}.yaml", methods=["GET", "HEAD"])
async def rule_file(name: str, request: Request) -> Response:
    """Serve the raw text of a configured rule file."""
    rule = app_config.rules.get(name)
    if rule is None:
        raise HTTPException(status_code=404, detail="rule not found")
    return _yaml_response(load_rule_text(_rule_path(rule)), request, filename=f"{name}.yaml")
@app.api_route(PUBLIC_PREFIX + "/clients/{client_type}.yaml", methods=["GET", "HEAD"])
async def client_profile(client_type: str, request: Request, sources: str | None = Query(default=None)) -> Response:
    """Serve a thin client profile that references providers and rules by URL."""
    client = app_config.clients.get(client_type)
    if client is None:
        raise HTTPException(status_code=404, detail="client config not found")
    selected = _resolve_sources(sources)
    profile = build_thin_profile(
        client_type=client_type,
        app_config=app_config,
        client=client,
        selected_source_names=[name for name, _ in selected],
        base_url=_base_url(request),
        public_path=(app_config.public_path or settings.public_path).strip("/"),
    )
    response_headers = {"profile-update-interval": str(client.provider_interval)}
    response_headers.update(await _build_quota_headers(selected))
    return _yaml_response(dump_yaml(profile), request, headers=response_headers, filename=f"{client_type}.yaml")
@app.api_route(PUBLIC_PREFIX + "/bundle/{client_type}.yaml", methods=["GET", "HEAD"])
async def bundle_profile(client_type: str, request: Request, sources: str | None = Query(default=None)) -> Response:
    """Serve a self-contained profile with all proxies inlined (no provider URLs)."""
    client = app_config.clients.get(client_type)
    if client is None:
        raise HTTPException(status_code=404, detail="client config not found")
    selected = _resolve_sources(sources)
    try:
        snapshots = await build_source_snapshots(selected)
    except Exception as exc:  # noqa: BLE001
        raise HTTPException(status_code=502, detail=f"failed to build bundle: {exc}") from exc
    profile = build_bundle_profile(
        client_type=client_type,
        app_config=app_config,
        client=client,
        snapshots=snapshots,
    )
    response_headers = {"profile-update-interval": str(client.provider_interval)}
    response_headers.update(await _build_quota_headers(selected))
    return _yaml_response(dump_yaml(profile), request, headers=response_headers, filename=f"bundle-{client_type}.yaml")

99
app/models.py Normal file
View File

@@ -0,0 +1,99 @@
from __future__ import annotations
from typing import Any, Literal
from pydantic import BaseModel, Field, HttpUrl
class SourceConfig(BaseModel):
    """One upstream subscription source from sources.yaml."""

    enabled: bool = True
    kind: Literal["clash_yaml"] = "clash_yaml"  # only Clash-style YAML upstreams are supported
    url: str
    display_name: str | None = None  # human-readable name; falls back to the mapping key
    headers: dict[str, str] = Field(default_factory=dict)  # extra headers sent on fetch
    include_regex: str | None = None  # keep only proxies whose name matches
    exclude_regex: str | None = None  # drop proxies whose name matches
    prefix: str = ""  # prepended to every proxy name
    suffix: str = ""  # appended to every proxy name
    cache_ttl_seconds: int | None = None  # per-source TTL; None uses the global default
class RuleConfig(BaseModel):
    """One rule-provider entry: a local rule file plus its routing policy."""

    file: str  # filename relative to the configured rules directory
    behavior: Literal["domain", "ipcidr", "classical"] = "domain"
    format: Literal["yaml", "text", "mrs"] = "yaml"
    interval: int = 86400  # refresh interval (seconds) advertised to clients
    policy: str  # target policy; may contain {{ main_policy }} / {{ direct_policy }} tokens
    no_resolve: bool = False  # append ",no-resolve" to generated rule lines
class RegionConfig(BaseModel):
    """A regional auto-select group defined by a proxy-name regex filter."""

    name: str
    filter: str  # regex matched against proxy names
    tolerance: int = 50  # url-test switching tolerance (ms)
class ClientConfig(BaseModel):
    """Per-client-type profile settings (group names, ports, intervals)."""

    title: str
    provider_interval: int = 21600  # proxy-provider refresh interval (seconds)
    rule_interval: int = 86400  # NOTE(review): not referenced by the generators in this commit — confirm intent
    # Declared as HttpUrl but defaulted with a plain string; pydantic v2 does not
    # validate defaults unless validate_default is set — callers str() it anyway.
    test_url: HttpUrl = "https://www.gstatic.com/generate_204"
    test_interval: int = 300  # health-check interval (seconds)
    main_policy: str = "节点选择"
    source_policy: str = "☁️ 机场选择"
    mixed_auto_policy: str = "♻️ 自动选择"
    manual_policy: str = "🚀 手动切换"
    direct_policy: str = "DIRECT"
    mode: str = "rule"
    allow_lan: bool = True
    ipv6: bool = True
    mixed_port: int | None = 7890  # None omits the port from the profile
    socks_port: int | None = 7891  # None omits the port from the profile
    log_level: str | None = "info"  # None omits log-level from the profile
class AppConfig(BaseModel):
    """Top-level structure of sources.yaml after env-var expansion."""

    public_path: str | None = None  # overrides Settings.public_path when set
    sources: dict[str, SourceConfig]
    rules: dict[str, RuleConfig] = Field(default_factory=dict)
    clients: dict[str, ClientConfig] = Field(default_factory=dict)
    regions: dict[str, RegionConfig] = Field(default_factory=dict)
class FetchResult(BaseModel):
    """Raw body and response headers of a fetched upstream subscription."""

    text: str
    headers: dict[str, str] = Field(default_factory=dict)
class ProviderDocument(BaseModel):
    """A Clash proxy-provider payload: just the normalized proxies list."""

    proxies: list[dict[str, Any]]
class SubscriptionUserInfo(BaseModel):
    """Traffic quota parsed from an upstream Subscription-Userinfo header."""

    upload: int | None = None
    download: int | None = None
    total: int | None = None
    expire: int | None = None

    def to_header_value(self) -> str:
        """Render the populated fields back into header form, e.g. "upload=1; total=2"."""
        pairs = (
            ("upload", self.upload),
            ("download", self.download),
            ("total", self.total),
            ("expire", self.expire),
        )
        return "; ".join(f"{key}={value}" for key, value in pairs if value is not None)

    def is_empty(self) -> bool:
        """True when the upstream provided no quota fields at all."""
        return not any(
            value is not None for value in (self.upload, self.download, self.total, self.expire)
        )
class SourceSnapshot(BaseModel):
    """Everything fetched/derived from one source in a single pass."""

    name: str  # source key from sources.yaml
    display_name: str  # human-readable name used for group labels
    document: ProviderDocument  # parsed + transformed proxies
    headers: dict[str, str] = Field(default_factory=dict)  # upstream response headers
    quota: SubscriptionUserInfo | None = None  # parsed quota, if the upstream sent one

30
app/services/cache.py Normal file
View File

@@ -0,0 +1,30 @@
from __future__ import annotations
import time
from dataclasses import dataclass
from typing import Generic, TypeVar
T = TypeVar("T")


@dataclass
class CacheItem(Generic[T]):
    """A cached value together with its expiry deadline (monotonic seconds)."""

    value: T
    expires_at: float


class TTLCache(Generic[T]):
    """Minimal in-process TTL cache with lazy, per-key expiry on read.

    Expired entries are evicted only when looked up; there is no background
    sweep, so unused keys linger until touched again.
    """

    def __init__(self) -> None:
        self._store: dict[str, CacheItem[T]] = {}

    def get(self, key: str) -> T | None:
        """Return the cached value, or None when absent or expired."""
        item = self._store.get(key)
        if item is None:
            return None
        # FIX: use the monotonic clock for TTL deadlines — wall-clock time.time()
        # can jump backwards/forwards (NTP, DST), corrupting expiry behavior.
        if item.expires_at < time.monotonic():
            self._store.pop(key, None)
            return None
        return item.value

    def set(self, key: str, value: T, ttl_seconds: int) -> None:
        """Store *value* under *key* for *ttl_seconds* from now."""
        self._store[key] = CacheItem(value=value, expires_at=time.monotonic() + ttl_seconds)

29
app/services/headers.py Normal file
View File

@@ -0,0 +1,29 @@
from __future__ import annotations
import re
from typing import Mapping
from app.models import SubscriptionUserInfo
# key=value pairs emitted in Subscription-Userinfo headers,
# e.g. "upload=123; download=456; total=789; expire=1700000000".
_SUBSCRIPTION_FIELDS = re.compile(r"(upload|download|total|expire)=(\d+)")
def get_header_case_insensitive(headers: Mapping[str, str], name: str) -> str | None:
    """Return the first header value whose key matches *name* case-insensitively, else None."""
    wanted = name.lower()
    return next((value for key, value in headers.items() if key.lower() == wanted), None)
def parse_subscription_userinfo(headers: Mapping[str, str]) -> SubscriptionUserInfo | None:
    """Extract quota fields from a Subscription-Userinfo header, or None when absent/empty."""
    raw = get_header_case_insensitive(headers, "Subscription-Userinfo")
    if not raw:
        return None
    # Later duplicates of a field override earlier ones, matching dict-update semantics.
    parsed = {field: int(amount) for field, amount in _SUBSCRIPTION_FIELDS.findall(raw)}
    info = SubscriptionUserInfo(**parsed)
    if info.is_empty():
        return None
    return info

27
app/services/loader.py Normal file
View File

@@ -0,0 +1,27 @@
from __future__ import annotations
import os
import re
from pathlib import Path
import yaml
from app.models import AppConfig
# Matches ${VAR_NAME} placeholders (upper-case env-style names) in config values.
_ENV_PATTERN = re.compile(r"\$\{([A-Z0-9_]+)\}")
def _expand_env(value):
    """Recursively substitute ${VAR} placeholders in strings; unset vars become "".

    Containers are rebuilt with every element expanded; all other types pass
    through unchanged.
    """
    if isinstance(value, dict):
        return {key: _expand_env(item) for key, item in value.items()}
    if isinstance(value, list):
        return [_expand_env(item) for item in value]
    if isinstance(value, str):
        return _ENV_PATTERN.sub(lambda match: os.getenv(match.group(1), ""), value)
    return value
def load_app_config(path: Path) -> AppConfig:
    """Load sources.yaml, expand ${ENV} placeholders, and validate into AppConfig."""
    text = path.read_text(encoding="utf-8")
    raw = yaml.safe_load(text) or {}
    return AppConfig.model_validate(_expand_env(raw))

275
app/services/profiles.py Normal file
View File

@@ -0,0 +1,275 @@
from __future__ import annotations
import re
from typing import Any
import yaml
from app.models import AppConfig, ClientConfig, SourceSnapshot
from app.services.rules import build_inline_rules, build_rule_provider_entries, build_rule_set_references
def dump_yaml(data: dict[str, Any]) -> str:
    """Serialize *data* to YAML, preserving insertion order and non-ASCII text."""
    options = dict(allow_unicode=True, sort_keys=False, default_flow_style=False)
    return yaml.safe_dump(data, **options)
def build_thin_profile(
    *,
    client_type: str,
    app_config: AppConfig,
    client: ClientConfig,
    selected_source_names: list[str],
    base_url: str,
    public_path: str,
) -> dict[str, Any]:
    """Build a thin client profile that references providers/rules by URL.

    Key insertion order matters: the dict is later dumped with sort_keys=False,
    so it defines the order of the emitted YAML document.
    """
    profile: dict[str, Any] = {
        "mode": client.mode,
        "ipv6": client.ipv6,
    }
    if client.log_level:
        profile["log-level"] = client.log_level
    # Port/LAN settings are emitted only for mihomo; other client types are
    # assumed to configure these themselves.
    if client_type == "mihomo":
        if client.mixed_port is not None:
            profile["mixed-port"] = client.mixed_port
        if client.socks_port is not None:
            profile["socks-port"] = client.socks_port
        profile["allow-lan"] = client.allow_lan
    proxy_providers: dict[str, dict[str, Any]] = {}
    for name in selected_source_names:
        if client_type == "mihomo":
            # Full mihomo provider entry with an on-disk path and health check.
            proxy_providers[name] = {
                "type": "http",
                "url": f"{base_url}/{public_path}/providers/{name}.yaml",
                "path": f"./providers/{name}.yaml",
                "interval": client.provider_interval,
                "health-check": {
                    "enable": True,
                    "url": str(client.test_url),
                    "interval": client.test_interval,
                },
            }
        else:
            # Minimal provider entry for other client types.
            proxy_providers[name] = {
                "url": f"{base_url}/{public_path}/providers/{name}.yaml",
                "interval": client.provider_interval,
            }
    profile["proxy-providers"] = proxy_providers
    profile["proxy-groups"] = _build_thin_groups(client_type, app_config, client, selected_source_names)
    profile["rule-providers"] = build_rule_provider_entries(app_config, client, base_url, public_path)
    profile["rules"] = build_rule_set_references(app_config, client)
    return profile
def build_bundle_profile(
    *,
    client_type: str,
    app_config: AppConfig,
    client: ClientConfig,
    snapshots: list[SourceSnapshot],
) -> dict[str, Any]:
    """Build a self-contained profile with every proxy inlined (no provider URLs).

    Proxy names are de-duplicated across sources by appending " #2", " #3", …
    so groups can reference them unambiguously. Key insertion order defines the
    emitted YAML order (dumped with sort_keys=False).
    """
    profile: dict[str, Any] = {
        "mode": client.mode,
        "ipv6": client.ipv6,
    }
    if client.log_level:
        profile["log-level"] = client.log_level
    # Port/LAN settings are emitted only for mihomo (mirrors build_thin_profile).
    if client_type == "mihomo":
        if client.mixed_port is not None:
            profile["mixed-port"] = client.mixed_port
        if client.socks_port is not None:
            profile["socks-port"] = client.socks_port
        profile["allow-lan"] = client.allow_lan
    proxies: list[dict[str, Any]]  = []
    source_proxy_names: dict[str, list[str]] = {}  # source name -> its (deduped) proxy names
    seen: set[str] = set()  # all proxy names emitted so far, across sources
    for snapshot in snapshots:
        names: list[str] = []
        for proxy in snapshot.document.proxies:
            candidate = dict(proxy)  # shallow copy so the cached snapshot is not mutated
            name = str(candidate.get("name", "")).strip()
            if not name:
                continue  # nameless proxies cannot be referenced by groups
            original = name
            index = 2
            while name in seen:
                name = f"{original} #{index}"
                index += 1
            seen.add(name)
            candidate["name"] = name
            proxies.append(candidate)
            names.append(name)
        source_proxy_names[snapshot.name] = names
    profile["proxies"] = proxies
    profile["proxy-groups"] = _build_bundle_groups(app_config, client, snapshots, source_proxy_names)
    profile["rules"] = build_inline_rules(app_config, client)
    return profile
def _build_thin_groups(client_type: str, app_config: AppConfig, client: ClientConfig, selected_source_names: list[str]) -> list[dict[str, Any]]:
    """Build the proxy-group list for a thin (provider-referencing) profile.

    Emitted order is significant: per-source auto groups, the mixed auto
    group, region groups, the source selector, the manual selector, and
    finally the main selector tying everything together.
    """
    groups: list[dict[str, Any]] = []
    source_auto_names: list[str] = []
    # One url-test group per selected source, backed by that source's provider.
    for source_name in selected_source_names:
        display_name = app_config.sources[source_name].display_name or source_name
        group_name = f"{display_name} 自动"
        source_auto_names.append(group_name)
        groups.append(
            {
                "name": group_name,
                "type": "url-test",
                "url": str(client.test_url),
                "interval": client.test_interval,
                "use": [source_name],
            }
        )
    # mihomo pulls proxies from providers via include-all-providers; other
    # client types use the generic include-all flag instead.
    if client_type == "mihomo":
        mixed_auto = {
            "name": client.mixed_auto_policy,
            "type": "url-test",
            "url": str(client.test_url),
            "interval": client.test_interval,
            "include-all-providers": True,
        }
        manual = {
            "name": client.manual_policy,
            "type": "select",
            "proxies": [client.direct_policy],
            "include-all-providers": True,
        }
    else:
        mixed_auto = {
            "name": client.mixed_auto_policy,
            "type": "url-test",
            "url": str(client.test_url),
            "interval": client.test_interval,
            "include-all": True,
        }
        manual = {
            "name": client.manual_policy,
            "type": "select",
            "proxies": [client.direct_policy],
            "include-all": True,
        }
    groups.append(mixed_auto)
    # Region auto groups: filter proxies by regex on the client side.
    region_names: list[str] = []
    for region in app_config.regions.values():
        group = {
            "name": region.name,
            "type": "url-test",
            "url": str(client.test_url),
            "interval": client.test_interval,
            "filter": region.filter,
            "tolerance": region.tolerance,
        }
        if client_type == "mihomo":
            group["include-all-providers"] = True
        else:
            group["include-all"] = True
        groups.append(group)
        region_names.append(region.name)
    # Selector over per-source auto groups.
    groups.append(
        {
            "name": client.source_policy,
            "type": "select",
            "proxies": [client.mixed_auto_policy, *source_auto_names, client.direct_policy],
        }
    )
    groups.append(manual)
    # Top-level selector referenced by the generated rules.
    groups.append(
        {
            "name": client.main_policy,
            "type": "select",
            "proxies": [
                client.source_policy,
                client.mixed_auto_policy,
                *region_names,
                client.manual_policy,
                client.direct_policy,
            ],
        }
    )
    return groups
def _build_bundle_groups(
    app_config: AppConfig,
    client: ClientConfig,
    snapshots: list[SourceSnapshot],
    source_proxy_names: dict[str, list[str]],
) -> list[dict[str, Any]]:
    """Build the proxy-group list for a bundle (inlined-proxies) profile.

    Unlike the thin variant, proxies are listed by name explicitly — including
    region matches, which are pre-filtered here with the region regex instead
    of delegating the filter to the client. Empty member lists fall back to
    the direct policy so no group is left memberless.
    """
    groups: list[dict[str, Any]] = []
    source_auto_names: list[str] = []
    # Flat list of every (already de-duplicated) proxy name across all sources.
    all_proxy_names = [name for names in source_proxy_names.values() for name in names]
    # One url-test group per source over that source's own proxies.
    for snapshot in snapshots:
        group_name = f"{snapshot.display_name} 自动"
        source_auto_names.append(group_name)
        groups.append(
            {
                "name": group_name,
                "type": "url-test",
                "url": str(client.test_url),
                "interval": client.test_interval,
                "proxies": source_proxy_names.get(snapshot.name) or [client.direct_policy],
            }
        )
    # Auto group across every proxy from every source.
    groups.append(
        {
            "name": client.mixed_auto_policy,
            "type": "url-test",
            "url": str(client.test_url),
            "interval": client.test_interval,
            "proxies": all_proxy_names or [client.direct_policy],
        }
    )
    # Region groups: membership resolved server-side via regex search.
    region_names: list[str] = []
    for region in app_config.regions.values():
        matched = [name for name in all_proxy_names if re.search(region.filter, name)]
        groups.append(
            {
                "name": region.name,
                "type": "url-test",
                "url": str(client.test_url),
                "interval": client.test_interval,
                "tolerance": region.tolerance,
                "proxies": matched or [client.direct_policy],
            }
        )
        region_names.append(region.name)
    groups.append(
        {
            "name": client.source_policy,
            "type": "select",
            "proxies": [client.mixed_auto_policy, *source_auto_names, client.direct_policy],
        }
    )
    groups.append(
        {
            "name": client.manual_policy,
            "type": "select",
            "proxies": [*all_proxy_names, client.direct_policy] if all_proxy_names else [client.direct_policy],
        }
    )
    # Top-level selector referenced by the generated rules.
    groups.append(
        {
            "name": client.main_policy,
            "type": "select",
            "proxies": [
                client.source_policy,
                client.mixed_auto_policy,
                *region_names,
                client.manual_policy,
                client.direct_policy,
            ],
        }
    )
    return groups

80
app/services/rules.py Normal file
View File

@@ -0,0 +1,80 @@
from __future__ import annotations
from pathlib import Path
import yaml
from app.config import get_settings
from app.models import AppConfig, ClientConfig, RuleConfig
def resolve_policy(policy: str, client: ClientConfig) -> str:
    """Substitute {{ main_policy }} / {{ direct_policy }} template tokens in *policy*.

    Both spaced and unspaced token spellings are accepted; replacements are
    applied sequentially in a fixed order.
    """
    substitutions = (
        ("{{ main_policy }}", client.main_policy),
        ("{{main_policy}}", client.main_policy),
        ("{{ direct_policy }}", client.direct_policy),
        ("{{direct_policy}}", client.direct_policy),
    )
    resolved = policy
    for token, replacement in substitutions:
        resolved = resolved.replace(token, replacement)
    return resolved
def load_rule_text(path: Path) -> str:
    """Return the raw UTF-8 contents of *path* verbatim."""
    with path.open(encoding="utf-8") as handle:
        return handle.read()
def load_rule_payload(path: Path) -> list[str]:
    """Parse a rule file into a flat list of payload entries.

    YAML files (.yaml/.yml) must carry a top-level ``payload`` list; every
    other extension is treated as plain text, skipping blank lines and ``#``
    comments.
    """
    if path.suffix.lower() not in {".yaml", ".yml"}:
        entries: list[str] = []
        for raw_line in path.read_text(encoding="utf-8").splitlines():
            candidate = raw_line.strip()
            if candidate and not candidate.startswith("#"):
                entries.append(candidate)
        return entries
    data = yaml.safe_load(path.read_text(encoding="utf-8")) or {}
    payload = data.get("payload", [])
    if not isinstance(payload, list):
        raise ValueError(f"Rule file {path.name} must contain a list field named 'payload'")
    return [str(item).strip() for item in payload if str(item).strip()]
def build_rule_provider_entries(app_config: AppConfig, client: ClientConfig, base_url: str, public_path: str):
    """Build the rule-providers mapping pointing clients at this service's rule URLs.

    Note: *client* is currently unused; intervals come from each rule's own
    config.
    """
    return {
        name: {
            "behavior": rule.behavior,
            "format": rule.format,
            "url": f"{base_url}/{public_path}/rules/{name}.yaml",
            "interval": rule.interval,
        }
        for name, rule in app_config.rules.items()
    }
def build_rule_set_references(app_config: AppConfig, client: ClientConfig) -> list[str]:
    """Build RULE-SET lines for every configured rule, ending with the MATCH catch-all."""
    refs: list[str] = []
    for name, rule in app_config.rules.items():
        tail = ",no-resolve" if rule.no_resolve else ""
        refs.append(f"RULE-SET,{name},{resolve_policy(rule.policy, client)}{tail}")
    refs.append(f"MATCH,{client.main_policy}")
    return refs
def build_inline_rules(app_config: AppConfig, client: ClientConfig) -> list[str]:
    """Flatten every rule file into inline "<payload>,<policy>[,no-resolve]" lines.

    Raises FileNotFoundError when a configured rule file is missing or resolves
    outside the rules directory. Ends with the MATCH catch-all line.
    """
    settings = get_settings()
    rules_root = settings.rules_dir.resolve()
    lines: list[str] = []
    for rule in app_config.rules.values():
        rule_path = (settings.rules_dir / rule.file).resolve()
        if not rule_path.is_file() or rules_root not in rule_path.parents:
            raise FileNotFoundError(f"Rule file missing: {rule.file}")
        target = resolve_policy(rule.policy, client)
        tail = ",no-resolve" if rule.no_resolve else ""
        for payload_line in load_rule_payload(rule_path):
            lines.append(f"{payload_line},{target}{tail}")
    lines.append(f"MATCH,{client.main_policy}")
    return lines

View File

@@ -0,0 +1,177 @@
from __future__ import annotations
import re
from typing import Any, Iterable
import httpx
import yaml
from app.config import get_settings
from app.models import FetchResult, ProviderDocument, SourceConfig, SourceSnapshot
from app.services.cache import TTLCache
from app.services.headers import parse_subscription_userinfo
# Process-wide TTL caches shared by all requests: raw upstream fetches,
# parsed provider documents, and full per-source snapshots.
_fetch_cache: TTLCache[FetchResult] = TTLCache()
_provider_cache: TTLCache[ProviderDocument] = TTLCache()
_snapshot_cache: TTLCache[SourceSnapshot] = TTLCache()
async def fetch_source(name: str, source: SourceConfig) -> FetchResult:
    """Download a source's raw subscription text, caching it under the source name.

    The per-source cache_ttl_seconds wins over the global default when set
    (note: a value of 0 falls back to the default via ``or``).
    """
    settings = get_settings()
    cached = _fetch_cache.get(name)
    if cached is not None:
        return cached
    # Source-specific headers override the default User-Agent.
    request_headers = {"User-Agent": settings.default_user_agent, **source.headers}
    async with httpx.AsyncClient(timeout=settings.request_timeout_seconds, follow_redirects=True) as client:
        response = await client.get(source.url, headers=request_headers)
        response.raise_for_status()
        result = FetchResult(text=response.text, headers=dict(response.headers))
    _fetch_cache.set(name, result, source.cache_ttl_seconds or settings.cache_ttl_seconds)
    return result
async def build_provider_document(name: str, source: SourceConfig) -> ProviderDocument:
    """Fetch, parse, filter and rename a source's proxies, with per-source caching."""
    cache_key = f"provider:{name}"
    cached = _provider_cache.get(cache_key)
    if cached is not None:
        return cached
    settings = get_settings()
    fetched = await fetch_source(name, source)
    if source.kind != "clash_yaml":
        raise ValueError(f"Unsupported source kind: {source.kind}")
    document = ProviderDocument(
        proxies=transform_proxies(
            parse_clash_yaml_proxies(fetched.text), source, settings.max_proxy_name_length
        )
    )
    _provider_cache.set(cache_key, document, source.cache_ttl_seconds or settings.cache_ttl_seconds)
    return document
async def build_source_snapshot(name: str, source: SourceConfig) -> SourceSnapshot:
    """Assemble one source's cached snapshot: parsed proxies plus upstream headers/quota."""
    cache_key = f"snapshot:{name}"
    cached = _snapshot_cache.get(cache_key)
    if cached is not None:
        return cached
    settings = get_settings()
    fetched = await fetch_source(name, source)
    document = await build_provider_document(name, source)
    snapshot = SourceSnapshot(
        name=name,
        display_name=source.display_name or name,
        document=document,
        headers=fetched.headers,
        quota=parse_subscription_userinfo(fetched.headers),
    )
    _snapshot_cache.set(cache_key, snapshot, source.cache_ttl_seconds or settings.cache_ttl_seconds)
    return snapshot
async def build_source_snapshots(source_items: Iterable[tuple[str, SourceConfig]]) -> list[SourceSnapshot]:
    """Build snapshots for every given source, sequentially and in input order."""
    return [await build_source_snapshot(name, source) for name, source in source_items]
async def build_merged_provider_document(source_items: Iterable[tuple[str, SourceConfig]]) -> ProviderDocument:
    """Concatenate proxies from all selected sources into one document.

    Name collisions across sources are resolved by appending " #2", " #3", …;
    nameless proxies are dropped.
    """
    merged: list[dict[str, Any]] = []
    taken: set[str] = set()
    for snapshot in await build_source_snapshots(source_items):
        for proxy in snapshot.document.proxies:
            entry = dict(proxy)  # shallow copy so cached snapshots stay untouched
            base = str(entry.get("name", "")).strip()
            if not base:
                continue
            unique = base
            suffix = 2
            while unique in taken:
                unique = f"{base} #{suffix}"
                suffix += 1
            entry["name"] = unique
            taken.add(unique)
            merged.append(entry)
    return ProviderDocument(proxies=merged)
async def get_first_quota(source_items: Iterable[tuple[str, SourceConfig]]):
    """Return the quota of the first selected source, or None when nothing is selected."""
    items = list(source_items)
    if not items:
        return None
    first_name, first_source = items[0]
    snapshot = await build_source_snapshot(first_name, first_source)
    return snapshot.quota
def parse_clash_yaml_proxies(text: str) -> list[dict[str, Any]]:
    """Extract well-formed proxy mappings from an upstream Clash YAML document.

    Raises ValueError when the document is not a mapping or lacks a 'proxies'
    list; entries that are not dicts or miss name/type are silently dropped.
    """
    data = yaml.safe_load(text)
    if not isinstance(data, dict):
        raise ValueError("Upstream YAML must be a mapping with a top-level 'proxies' field")
    proxies = data.get("proxies")
    if not isinstance(proxies, list):
        raise ValueError("Upstream YAML must contain a list field named 'proxies'")
    return [
        item
        for item in proxies
        if isinstance(item, dict) and item.get("name") and item.get("type")
    ]
def transform_proxies(
    proxies: list[dict[str, Any]], source: SourceConfig, max_proxy_name_length: int
) -> list[dict[str, Any]]:
    """Filter proxies by the source's include/exclude regexes and rewrite their names.

    Names get the source prefix/suffix, are truncated to the maximum length,
    and duplicates within the result gain " #2", " #3", … suffixes. Nameless
    proxies are dropped; input dicts are never mutated.
    """
    include_pattern = re.compile(source.include_regex) if source.include_regex else None
    exclude_pattern = re.compile(source.exclude_regex) if source.exclude_regex else None
    result: list[dict[str, Any]] = []
    name_counts: dict[str, int] = {}
    for entry in proxies:
        original_name = str(entry.get("name", "")).strip()
        if not original_name:
            continue
        if include_pattern is not None and include_pattern.search(original_name) is None:
            continue
        if exclude_pattern is not None and exclude_pattern.search(original_name) is not None:
            continue
        renamed = f"{source.prefix}{original_name}{source.suffix}".strip()
        if len(renamed) > max_proxy_name_length:
            renamed = renamed[:max_proxy_name_length].rstrip()
        occurrence = name_counts.get(renamed, 0) + 1
        name_counts[renamed] = occurrence
        updated = dict(entry)
        updated["name"] = renamed if occurrence == 1 else f"{renamed} #{occurrence}"
        result.append(updated)
    return result
def dump_provider_yaml(document: ProviderDocument) -> str:
    """Serialize a provider document as Clash-compatible YAML (order preserved, unicode kept)."""
    payload = {"proxies": document.proxies}
    return yaml.safe_dump(payload, allow_unicode=True, sort_keys=False, default_flow_style=False)