init
This commit is contained in:
16
.env.example
Normal file
16
.env.example
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
APP_NAME=sub-provider
|
||||||
|
APP_ENV=prod
|
||||||
|
HOST=0.0.0.0
|
||||||
|
PORT=18080
|
||||||
|
LOG_LEVEL=info
|
||||||
|
|
||||||
|
# 对外访问前缀,尽量改成足够长的随机字符串
|
||||||
|
PUBLIC_PATH=change-me-random-hash-path
|
||||||
|
|
||||||
|
# 如反代后域名不是容器本机可感知到的地址,建议显式填写
|
||||||
|
# 例如:https://sub.example.com
|
||||||
|
PUBLIC_BASE_URL=
|
||||||
|
|
||||||
|
# 上游这里先放“能直接返回 Clash/Mihomo YAML proxies 文件”的地址
|
||||||
|
AIRPORT_A_URL=https://example.com/airport-a.yaml
|
||||||
|
AIRPORT_B_URL=https://example.com/airport-b.yaml
|
||||||
209
.gitignore
vendored
Normal file
209
.gitignore
vendored
Normal file
@@ -0,0 +1,209 @@
|
|||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[codz]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
.idea/
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py.cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
cover/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
.pybuilder/
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
# For a library or package, you might want to ignore these files since the code is
|
||||||
|
# intended to run in multiple environments; otherwise, check them in:
|
||||||
|
# .python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||||
|
# install all needed dependencies.
|
||||||
|
#Pipfile.lock
|
||||||
|
|
||||||
|
# UV
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
#uv.lock
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||||
|
#poetry.lock
|
||||||
|
#poetry.toml
|
||||||
|
|
||||||
|
# pdm
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||||
|
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
|
||||||
|
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
|
||||||
|
#pdm.lock
|
||||||
|
#pdm.toml
|
||||||
|
.pdm-python
|
||||||
|
.pdm-build/
|
||||||
|
|
||||||
|
# pixi
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
|
||||||
|
#pixi.lock
|
||||||
|
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
|
||||||
|
# in the .venv directory. It is recommended not to include this directory in version control.
|
||||||
|
.pixi
|
||||||
|
|
||||||
|
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||||
|
__pypackages__/
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
celerybeat-schedule
|
||||||
|
celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
.env
|
||||||
|
.envrc
|
||||||
|
.venv
|
||||||
|
env/
|
||||||
|
venv/
|
||||||
|
ENV/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
.pytype/
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
cython_debug/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||||
|
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||||
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||||
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
|
#.idea/
|
||||||
|
|
||||||
|
# Abstra
|
||||||
|
# Abstra is an AI-powered process automation framework.
|
||||||
|
# Ignore directories containing user credentials, local state, and settings.
|
||||||
|
# Learn more at https://abstra.io/docs
|
||||||
|
.abstra/
|
||||||
|
|
||||||
|
# Visual Studio Code
|
||||||
|
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
|
||||||
|
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
|
||||||
|
# and can be added to the global gitignore or merged into this file. However, if you prefer,
|
||||||
|
# you could uncomment the following to ignore the entire vscode folder
|
||||||
|
# .vscode/
|
||||||
|
|
||||||
|
# Ruff stuff:
|
||||||
|
.ruff_cache/
|
||||||
|
|
||||||
|
# PyPI configuration file
|
||||||
|
.pypirc
|
||||||
|
|
||||||
|
# Cursor
|
||||||
|
# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
|
||||||
|
# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
|
||||||
|
# refer to https://docs.cursor.com/context/ignore-files
|
||||||
|
.cursorignore
|
||||||
|
.cursorindexingignore
|
||||||
|
|
||||||
|
# Marimo
|
||||||
|
marimo/_static/
|
||||||
|
marimo/_lsp/
|
||||||
|
__marimo__/
|
||||||
18
Dockerfile
Normal file
18
Dockerfile
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Slim Python base keeps the final image small.
FROM python:3.12-slim

# Do not write .pyc files; flush stdout/stderr immediately (container logging).
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

WORKDIR /app

# Install dependencies first so Docker layer caching survives code-only changes.
COPY requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r /app/requirements.txt

# Application code plus default configuration and env template.
COPY app /app/app
COPY config /app/config
COPY .env.example /app/.env.example

# Must match the PORT default in app/config.py and docker-compose.
EXPOSE 18080

CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "18080"]
|
||||||
178
README.md
178
README.md
@@ -1,2 +1,180 @@
|
|||||||
# sub-provider
|
# sub-provider
|
||||||
|
|
||||||
|
一个可部署的订阅聚合后端模板,目标是同时支持两种输出方式:
|
||||||
|
|
||||||
|
- **薄壳模式**:客户端拉取 `/clients/*.yaml`,配置内继续引用远程 `proxy-providers` / `rule-providers`
|
||||||
|
- **打包模式**:客户端拉取 `/bundle/*.yaml`,服务端把节点、策略组、规则全部铺开成单文件 YAML
|
||||||
|
|
||||||
|
这一版已经补上:
|
||||||
|
|
||||||
|
- 多机场源选择:`?sources=airport-a,airport-b`
|
||||||
|
- 单机场或多机场时,**始终取第一个源**的 `Subscription-Userinfo` 返回给客户端
|
||||||
|
- `GET` 和 `HEAD` 都支持
|
||||||
|
- provider 单源输出、merged provider 输出、thin client 输出、bundle 输出
|
||||||
|
- 服务端内部继续解耦:抓取、配额头解析、provider 构建、规则加载、profile 组装分层处理
|
||||||
|
|
||||||
|
> 当前版本仍然只支持上游已经能返回 Clash/Mihomo YAML `proxies:` 文件的地址。
|
||||||
|
> 这很适合先接你现有的 `sub-wrapper` / `sub-store` / `subconverter(-extended)` 输出。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 目录结构
|
||||||
|
|
||||||
|
```text
|
||||||
|
sub-provider/
|
||||||
|
app/
|
||||||
|
config.py
|
||||||
|
main.py
|
||||||
|
models.py
|
||||||
|
services/
|
||||||
|
cache.py
|
||||||
|
headers.py
|
||||||
|
loader.py
|
||||||
|
profiles.py
|
||||||
|
rules.py
|
||||||
|
subscriptions.py
|
||||||
|
config/
|
||||||
|
sources.yaml
|
||||||
|
rules/
|
||||||
|
reject.yaml
|
||||||
|
direct.yaml
|
||||||
|
proxy.yaml
|
||||||
|
cn-ip.yaml
|
||||||
|
data/
|
||||||
|
cache/
|
||||||
|
.env.example
|
||||||
|
Dockerfile
|
||||||
|
docker-compose.yaml
|
||||||
|
requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 快速开始
|
||||||
|
|
||||||
|
1. 复制环境变量文件:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cp .env.example .env
|
||||||
|
```
|
||||||
|
|
||||||
|
2. 编辑 `.env`:
|
||||||
|
|
||||||
|
- `PUBLIC_PATH` 改成足够长的随机字符串
|
||||||
|
- `PUBLIC_BASE_URL` 建议填写你反代后的最终访问地址,例如 `https://sub.example.com`
|
||||||
|
- `AIRPORT_A_URL` / `AIRPORT_B_URL` 改成你的上游 YAML provider 地址
|
||||||
|
|
||||||
|
3. 启动:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker compose up -d --build
|
||||||
|
```
|
||||||
|
|
||||||
|
4. 访问检查:
|
||||||
|
|
||||||
|
- 健康检查:`http://YOUR_HOST:18080/healthz`
|
||||||
|
- 单 provider:
|
||||||
|
`https://YOUR_DOMAIN/<PUBLIC_PATH>/providers/airport-a.yaml`
|
||||||
|
- merged provider:
|
||||||
|
`https://YOUR_DOMAIN/<PUBLIC_PATH>/providers/merged.yaml?sources=airport-a,airport-b`
|
||||||
|
- Mihomo/OpenClash 薄壳入口:
|
||||||
|
`https://YOUR_DOMAIN/<PUBLIC_PATH>/clients/mihomo.yaml?sources=airport-a,airport-b`
|
||||||
|
- Stash 薄壳入口:
|
||||||
|
`https://YOUR_DOMAIN/<PUBLIC_PATH>/clients/stash.yaml?sources=airport-a,airport-b`
|
||||||
|
- Mihomo/OpenClash bundle:
|
||||||
|
`https://YOUR_DOMAIN/<PUBLIC_PATH>/bundle/mihomo.yaml?sources=airport-a,airport-b`
|
||||||
|
- Stash bundle:
|
||||||
|
`https://YOUR_DOMAIN/<PUBLIC_PATH>/bundle/stash.yaml?sources=airport-a,airport-b`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 接口说明
|
||||||
|
|
||||||
|
### 1. 单 provider
|
||||||
|
|
||||||
|
```text
|
||||||
|
GET /<PUBLIC_PATH>/providers/{name}.yaml
|
||||||
|
HEAD /<PUBLIC_PATH>/providers/{name}.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
返回指定机场源的 provider 文件,并携带这个源的 `Subscription-Userinfo`(如果上游有)。
|
||||||
|
|
||||||
|
### 2. merged provider
|
||||||
|
|
||||||
|
```text
|
||||||
|
GET /<PUBLIC_PATH>/providers/merged.yaml?sources=airport-a,airport-b
|
||||||
|
HEAD /<PUBLIC_PATH>/providers/merged.yaml?sources=airport-a,airport-b
|
||||||
|
```
|
||||||
|
|
||||||
|
把多个 provider 合并成一个 `proxies:` 文件返回。响应头只取 `sources` 参数里**第一个源**的配额信息。
|
||||||
|
|
||||||
|
### 3. 薄壳客户端配置
|
||||||
|
|
||||||
|
```text
|
||||||
|
GET /<PUBLIC_PATH>/clients/mihomo.yaml?sources=airport-a,airport-b
|
||||||
|
GET /<PUBLIC_PATH>/clients/stash.yaml?sources=airport-a,airport-b
|
||||||
|
HEAD /<PUBLIC_PATH>/clients/mihomo.yaml?sources=airport-a,airport-b
|
||||||
|
HEAD /<PUBLIC_PATH>/clients/stash.yaml?sources=airport-a,airport-b
|
||||||
|
```
|
||||||
|
|
||||||
|
特点:
|
||||||
|
|
||||||
|
- 客户端收到的是轻量入口配置
|
||||||
|
- 节点更新依赖远程 `proxy-providers`
|
||||||
|
- 规则更新依赖远程 `rule-providers`
|
||||||
|
- 响应头同样只取第一个源的 `Subscription-Userinfo`
|
||||||
|
|
||||||
|
### 4. bundle 单文件配置
|
||||||
|
|
||||||
|
```text
|
||||||
|
GET /<PUBLIC_PATH>/bundle/mihomo.yaml?sources=airport-a,airport-b
|
||||||
|
GET /<PUBLIC_PATH>/bundle/stash.yaml?sources=airport-a,airport-b
|
||||||
|
HEAD /<PUBLIC_PATH>/bundle/mihomo.yaml?sources=airport-a,airport-b
|
||||||
|
HEAD /<PUBLIC_PATH>/bundle/stash.yaml?sources=airport-a,airport-b
|
||||||
|
```
|
||||||
|
|
||||||
|
特点:
|
||||||
|
|
||||||
|
- 服务端把节点、策略组、规则全部展开到一个 YAML 里
|
||||||
|
- 适合想直接给 Mihomo Party / Clash Party / Stash 一个最终链接的场景
|
||||||
|
- 响应头同样只取第一个源的 `Subscription-Userinfo`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 默认策略结构
|
||||||
|
|
||||||
|
当前默认生成的策略组是一个基础版:
|
||||||
|
|
||||||
|
- `☁️ 机场选择`
|
||||||
|
- `♻️ 自动选择`
|
||||||
|
- `🚀 手动切换`
|
||||||
|
- `🇭🇰 香港自动`
|
||||||
|
- `🇸🇬 新加坡自动`
|
||||||
|
- `🇯🇵 日本自动`
|
||||||
|
- `🇺🇸 美国自动`
|
||||||
|
- `节点选择`
|
||||||
|
|
||||||
|
其中:
|
||||||
|
|
||||||
|
- `☁️ 机场选择` 允许在“混合自动”和各机场单独自动组之间切换
|
||||||
|
- `节点选择` 是最终主策略组
|
||||||
|
- bundle 模式会把节点名全部展开
|
||||||
|
- thin 模式会保留 provider 引用关系
|
||||||
|
|
||||||
|
你后面要继续进阶的话,最值得加的是:
|
||||||
|
|
||||||
|
- `policies.yaml`:把 Telegram / AI / YouTube / Netflix 这类业务组模板化
|
||||||
|
- `regions.yaml`:把更多地区从 `sources.yaml` 独立出去
|
||||||
|
- URI/base64 原始订阅解析
|
||||||
|
- 鉴权层(例如前置 Caddy/Nginx Basic Auth 或仅 Tailscale 可访问)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 配额头策略
|
||||||
|
|
||||||
|
为了避免聚合多个机场后“总流量怎么显示”语义混乱,这个版本统一采用:
|
||||||
|
|
||||||
|
- `sources` 只填一个机场源:返回这个机场源的配额信息
|
||||||
|
- `sources` 填多个机场源:**只取第一个**机场源的 `Subscription-Userinfo`
|
||||||
|
|
||||||
|
这样 Stash、Clash Party 这类客户端读取配置订阅头时,行为是稳定可预期的。
|
||||||
|
|||||||
1
app/__init__.py
Normal file
1
app/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
__all__ = []
|
||||||
41
app/config.py
Normal file
41
app/config.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from functools import lru_cache
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||||
|
|
||||||
|
|
||||||
|
# Repository root: parent of the app/ package this file lives in.
ROOT_DIR = Path(__file__).resolve().parent.parent
# Static configuration (sources.yaml, rules/) shipped alongside the app.
CONFIG_DIR = ROOT_DIR / "config"
# Runtime data directory (e.g. cache) — not read in this module itself.
DATA_DIR = ROOT_DIR / "data"
|
||||||
|
|
||||||
|
|
||||||
|
class Settings(BaseSettings):
    """Process configuration, read from environment variables and .env.

    pydantic-settings maps each field to its upper-cased env name
    (e.g. ``public_path`` <- ``PUBLIC_PATH``); unknown env entries
    are ignored via ``extra="ignore"``.
    """

    app_name: str = "sub-provider"
    app_env: str = "prod"
    host: str = "0.0.0.0"
    port: int = 18080
    log_level: str = "info"

    # Secret-ish URL prefix that all subscription routes are mounted under;
    # the default is a placeholder meant to be replaced in .env.
    public_path: str = Field(default="change-me-random-hash-path")
    # Externally visible base URL; set this when running behind a reverse proxy.
    public_base_url: str | None = None
    request_timeout_seconds: float = 20.0
    cache_ttl_seconds: int = 900
    max_proxy_name_length: int = 80
    default_user_agent: str = "sub-provider/0.2"

    # Locations of the source registry and the local rule files.
    sources_file: Path = CONFIG_DIR / "sources.yaml"
    rules_dir: Path = CONFIG_DIR / "rules"

    model_config = SettingsConfigDict(
        env_file=ROOT_DIR / ".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache(maxsize=1)
def get_settings() -> Settings:
    """Return the process-wide Settings instance (constructed once, then cached)."""
    return Settings()
|
||||||
167
app/main.py
Normal file
167
app/main.py
Normal file
@@ -0,0 +1,167 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from fastapi import FastAPI, HTTPException, Query, Request
|
||||||
|
from fastapi.responses import Response
|
||||||
|
|
||||||
|
from app.config import get_settings
|
||||||
|
from app.models import RuleConfig, SourceConfig
|
||||||
|
from app.services.loader import load_app_config
|
||||||
|
from app.services.profiles import build_bundle_profile, build_thin_profile, dump_yaml
|
||||||
|
from app.services.rules import load_rule_text
|
||||||
|
from app.services.subscriptions import (
|
||||||
|
build_merged_provider_document,
|
||||||
|
build_provider_document,
|
||||||
|
build_source_snapshots,
|
||||||
|
dump_provider_yaml,
|
||||||
|
get_first_quota,
|
||||||
|
)
|
||||||
|
|
||||||
|
# App-wide singletons, built eagerly at import time.
settings = get_settings()
app = FastAPI(title=settings.app_name)
app_config = load_app_config(settings.sources_file)
# All subscription routes hang off this secret path prefix; a public_path
# set in sources.yaml overrides the environment value.
PUBLIC_PREFIX = "/" + (app_config.public_path or settings.public_path).strip("/")
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/healthz")
|
||||||
|
async def healthz() -> dict[str, str]:
|
||||||
|
return {"status": "ok"}
|
||||||
|
|
||||||
|
|
||||||
|
def _base_url(request: Request) -> str:
    """Pick the externally visible base URL, without a trailing slash.

    A configured PUBLIC_BASE_URL always wins; otherwise fall back to the
    URL the request actually arrived on.
    """
    configured = settings.public_base_url
    chosen = configured if configured else str(request.base_url)
    return chosen.rstrip("/")
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_sources(sources: str | None) -> list[tuple[str, SourceConfig]]:
    """Translate a ``?sources=`` query value into (name, SourceConfig) pairs.

    Without a query value, every enabled source is returned in registry
    order. Otherwise the comma-separated names are validated (404 for an
    unknown or disabled source, 400 when nothing usable was named) and
    de-duplicated while preserving first-seen order.
    """
    if not sources:
        return [(key, cfg) for key, cfg in app_config.sources.items() if cfg.enabled]

    requested = [part.strip() for part in sources.split(",") if part.strip()]
    picked: dict[str, SourceConfig] = {}
    for key in requested:
        if key in picked:
            continue
        cfg = app_config.sources.get(key)
        if cfg is None or not cfg.enabled:
            raise HTTPException(status_code=404, detail=f"source not found or disabled: {key}")
        picked[key] = cfg
    if not picked:
        raise HTTPException(status_code=400, detail="no sources selected")
    return list(picked.items())
|
||||||
|
|
||||||
|
|
||||||
|
def _rule_path(rule: RuleConfig):
    """Resolve a rule's file path inside the configured rules directory.

    Raises a generic 404 both when the file is missing and when the
    resolved path escapes rules_dir (path-traversal guard).
    """
    path = (settings.rules_dir / rule.file).resolve()
    # The resolved path must be an existing file AND have rules_dir as an
    # ancestor; `..` components or symlinks pointing outside are rejected.
    if not path.is_file() or settings.rules_dir.resolve() not in path.parents:
        raise HTTPException(status_code=404, detail="rule file missing")
    return path
|
||||||
|
|
||||||
|
|
||||||
|
async def _build_quota_headers(source_items: list[tuple[str, SourceConfig]]) -> dict[str, str]:
    """Build response headers carrying the first source's quota info.

    Returns an empty dict when no usable Subscription-Userinfo data is
    available for the first selected source.
    """
    quota = await get_first_quota(source_items)
    if quota is None or quota.is_empty():
        return {}
    return {"Subscription-Userinfo": quota.to_header_value()}
|
||||||
|
|
||||||
|
|
||||||
|
def _yaml_response(content: str, request: Request, headers: dict[str, str] | None = None, filename: str | None = None) -> Response:
    """Wrap YAML text in an HTTP response with subscription-friendly headers.

    Args:
        content: Serialized YAML document.
        request: Incoming request; for HEAD requests the body is suppressed.
        headers: Optional extra headers (e.g. Subscription-Userinfo) merged in.
        filename: Optional download filename for Content-Disposition.

    Returns:
        A text/yaml Response with Cache-Control: no-store.
    """
    from urllib.parse import quote

    final_headers = {
        "Content-Type": "text/yaml; charset=utf-8",
        "Cache-Control": "no-store",
    }
    if headers:
        final_headers.update(headers)
    if filename:
        # BUG FIX: the filename argument was previously ignored — the header
        # was an f-string with no placeholder, sending the literal text
        # "(unknown)" for every download. Use RFC 5987/6266 `filename*`
        # with percent-encoding so non-ASCII names survive intact.
        final_headers["Content-Disposition"] = f"attachment; filename*=UTF-8''{quote(filename)}"
    # HEAD requests receive headers only.
    body = "" if request.method == "HEAD" else content
    return Response(content=body, media_type="text/yaml; charset=utf-8", headers=final_headers)
|
||||||
|
|
||||||
|
|
||||||
|
@app.api_route(PUBLIC_PREFIX + "/providers/merged.yaml", methods=["GET", "HEAD"])
async def merged_provider(request: Request, sources: str | None = Query(default=None)) -> Response:
    """Serve the selected sources merged into a single provider YAML.

    Upstream fetch/merge failures surface as 502. The quota header
    reflects only the FIRST selected source (deliberate policy).
    """
    source_items = _resolve_sources(sources)
    try:
        document = await build_merged_provider_document(source_items)
    except Exception as exc:  # noqa: BLE001
        raise HTTPException(status_code=502, detail=f"failed to build merged provider: {exc}") from exc

    content = dump_provider_yaml(document)
    headers = await _build_quota_headers(source_items)
    return _yaml_response(content, request, headers=headers, filename="merged.yaml")
|
||||||
|
|
||||||
|
|
||||||
|
@app.api_route(PUBLIC_PREFIX + "/providers/{name}.yaml", methods=["GET", "HEAD"])
async def provider(name: str, request: Request) -> Response:
    """Serve a single source's provider YAML with its own quota header.

    404 for unknown/disabled sources; 502 when the upstream fetch fails.
    """
    source = app_config.sources.get(name)
    if source is None or not source.enabled:
        raise HTTPException(status_code=404, detail="provider not found")

    try:
        document = await build_provider_document(name, source)
    except Exception as exc:  # noqa: BLE001
        raise HTTPException(status_code=502, detail=f"failed to build provider: {exc}") from exc

    content = dump_provider_yaml(document)
    headers = await _build_quota_headers([(name, source)])
    return _yaml_response(content, request, headers=headers, filename=f"{name}.yaml")
|
||||||
|
|
||||||
|
|
||||||
|
@app.api_route(PUBLIC_PREFIX + "/rules/{name}.yaml", methods=["GET", "HEAD"])
async def rule_file(name: str, request: Request) -> Response:
    """Serve a local rule-provider file referenced by client configs.

    404 when the name is unknown or the backing file is missing/escapes
    the rules directory (see _rule_path).
    """
    rule = app_config.rules.get(name)
    if rule is None:
        raise HTTPException(status_code=404, detail="rule not found")
    content = load_rule_text(_rule_path(rule))
    return _yaml_response(content, request, filename=f"{name}.yaml")
|
||||||
|
|
||||||
|
|
||||||
|
@app.api_route(PUBLIC_PREFIX + "/clients/{client_type}.yaml", methods=["GET", "HEAD"])
async def client_profile(client_type: str, request: Request, sources: str | None = Query(default=None)) -> Response:
    """Serve a thin client profile that references remote providers.

    The generated YAML points back at this service's /providers and
    /rules endpoints instead of inlining nodes. 404 when the client type
    has no entry in the config.
    """
    client = app_config.clients.get(client_type)
    if client is None:
        raise HTTPException(status_code=404, detail="client config not found")

    source_items = _resolve_sources(sources)
    content = dump_yaml(
        build_thin_profile(
            client_type=client_type,
            app_config=app_config,
            client=client,
            selected_source_names=[name for name, _ in source_items],
            base_url=_base_url(request),
            public_path=(app_config.public_path or settings.public_path).strip("/"),
        )
    )
    # Hint for clients that honor profile auto-refresh intervals.
    headers = {"profile-update-interval": str(client.provider_interval)}
    headers.update(await _build_quota_headers(source_items))
    return _yaml_response(content, request, headers=headers, filename=f"{client_type}.yaml")
|
||||||
|
|
||||||
|
|
||||||
|
@app.api_route(PUBLIC_PREFIX + "/bundle/{client_type}.yaml", methods=["GET", "HEAD"])
async def bundle_profile(client_type: str, request: Request, sources: str | None = Query(default=None)) -> Response:
    """Serve a fully-expanded single-file profile (nodes + groups + rules).

    Unlike the thin profile, all upstream sources are fetched server-side
    and inlined; upstream failures surface as 502.
    """
    client = app_config.clients.get(client_type)
    if client is None:
        raise HTTPException(status_code=404, detail="client config not found")

    source_items = _resolve_sources(sources)
    try:
        snapshots = await build_source_snapshots(source_items)
    except Exception as exc:  # noqa: BLE001
        raise HTTPException(status_code=502, detail=f"failed to build bundle: {exc}") from exc

    content = dump_yaml(
        build_bundle_profile(
            client_type=client_type,
            app_config=app_config,
            client=client,
            snapshots=snapshots,
        )
    )
    # Hint for clients that honor profile auto-refresh intervals.
    headers = {"profile-update-interval": str(client.provider_interval)}
    headers.update(await _build_quota_headers(source_items))
    return _yaml_response(content, request, headers=headers, filename=f"bundle-{client_type}.yaml")
|
||||||
99
app/models.py
Normal file
99
app/models.py
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any, Literal
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field, HttpUrl
|
||||||
|
|
||||||
|
|
||||||
|
class SourceConfig(BaseModel):
    """One upstream airport source: a URL returning Clash/Mihomo proxies YAML."""

    enabled: bool = True  # disabled sources are invisible to every endpoint
    kind: Literal["clash_yaml"] = "clash_yaml"  # only clash_yaml upstreams are supported for now
    url: str  # upstream subscription URL
    display_name: str | None = None  # human-facing label
    headers: dict[str, str] = Field(default_factory=dict)  # extra headers for the upstream fetch
    include_regex: str | None = None  # presumably filters kept proxy names — confirm in subscriptions service
    exclude_regex: str | None = None  # presumably filters dropped proxy names — confirm in subscriptions service
    prefix: str = ""  # text applied around proxy names — TODO confirm usage
    suffix: str = ""
    cache_ttl_seconds: int | None = None  # per-source override of the global cache TTL
|
||||||
|
|
||||||
|
|
||||||
|
class RuleConfig(BaseModel):
    """A rule-provider entry backed by a local file under the rules directory."""

    file: str  # filename relative to settings.rules_dir
    behavior: Literal["domain", "ipcidr", "classical"] = "domain"
    format: Literal["yaml", "text", "mrs"] = "yaml"
    interval: int = 86400  # client-side refresh interval, seconds
    policy: str  # policy group this rule set routes matched traffic to
    no_resolve: bool = False  # NOTE(review): likely maps to Clash no-resolve — confirm in rules builder
|
||||||
|
|
||||||
|
|
||||||
|
class RegionConfig(BaseModel):
    """A region auto-select group definition (e.g. HK/SG/JP/US)."""

    name: str  # display name of the region group
    filter: str  # presumably a regex selecting this region's proxies — confirm in profiles builder
    tolerance: int = 50  # presumably url-test switch tolerance in ms — confirm
|
||||||
|
|
||||||
|
|
||||||
|
class ClientConfig(BaseModel):
    """Per-client-type (e.g. mihomo/stash) profile-generation options."""

    title: str
    provider_interval: int = 21600  # proxy-provider refresh seconds; also echoed as profile-update-interval
    rule_interval: int = 86400
    # NOTE(review): the default is a plain str; pydantic v2 does not coerce
    # defaults to HttpUrl unless validate_default is set — callers str() it anyway.
    test_url: HttpUrl = "https://www.gstatic.com/generate_204"
    test_interval: int = 300
    # Names of the generated policy groups (Chinese labels are intentional,
    # they appear verbatim in client UIs).
    main_policy: str = "节点选择"
    source_policy: str = "☁️ 机场选择"
    mixed_auto_policy: str = "♻️ 自动选择"
    manual_policy: str = "🚀 手动切换"
    direct_policy: str = "DIRECT"
    mode: str = "rule"
    allow_lan: bool = True
    ipv6: bool = True
    # Port fields set to None are omitted from the generated profile.
    mixed_port: int | None = 7890
    socks_port: int | None = 7891
    log_level: str | None = "info"
|
||||||
|
|
||||||
|
|
||||||
|
class AppConfig(BaseModel):
    """Root schema of config/sources.yaml."""

    public_path: str | None = None  # overrides the PUBLIC_PATH env setting when set
    sources: dict[str, SourceConfig]  # required: at least the mapping itself must exist
    rules: dict[str, RuleConfig] = Field(default_factory=dict)
    clients: dict[str, ClientConfig] = Field(default_factory=dict)
    regions: dict[str, RegionConfig] = Field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
class FetchResult(BaseModel):
    """Raw upstream HTTP response: body text plus response headers."""

    text: str
    headers: dict[str, str] = Field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
class ProviderDocument(BaseModel):
    """A Clash provider file: just the list of proxy mappings."""

    proxies: list[dict[str, Any]]
|
||||||
|
|
||||||
|
|
||||||
|
class SubscriptionUserInfo(BaseModel):
    """Quota data parsed from an upstream Subscription-Userinfo header.

    Every field is optional; unset fields are skipped when the header is
    rebuilt for the client.
    """

    upload: int | None = None
    download: int | None = None
    total: int | None = None
    expire: int | None = None

    def to_header_value(self) -> str:
        """Render back to ``key=value; key=value`` header syntax, omitting unset fields."""
        present = [
            f"{label}={amount}"
            for label, amount in (
                ("upload", self.upload),
                ("download", self.download),
                ("total", self.total),
                ("expire", self.expire),
            )
            if amount is not None
        ]
        return "; ".join(present)

    def is_empty(self) -> bool:
        """True when no quota field carries a value at all."""
        return all(
            field is None
            for field in (self.upload, self.download, self.total, self.expire)
        )
|
||||||
|
|
||||||
|
|
||||||
|
class SourceSnapshot(BaseModel):
    """One fetched-and-parsed source, ready for merging or bundling."""

    name: str  # source key from sources.yaml
    display_name: str
    document: ProviderDocument  # parsed proxies from the upstream YAML
    headers: dict[str, str] = Field(default_factory=dict)  # upstream response headers
    quota: SubscriptionUserInfo | None = None  # parsed Subscription-Userinfo, if any
|
||||||
30
app/services/cache.py
Normal file
30
app/services/cache.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import time
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Generic, TypeVar
|
||||||
|
|
||||||
|
T = TypeVar("T")
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class CacheItem(Generic[T]):
|
||||||
|
value: T
|
||||||
|
expires_at: float
|
||||||
|
|
||||||
|
|
||||||
|
class TTLCache(Generic[T]):
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self._store: dict[str, CacheItem[T]] = {}
|
||||||
|
|
||||||
|
def get(self, key: str) -> T | None:
|
||||||
|
item = self._store.get(key)
|
||||||
|
if item is None:
|
||||||
|
return None
|
||||||
|
if item.expires_at < time.time():
|
||||||
|
self._store.pop(key, None)
|
||||||
|
return None
|
||||||
|
return item.value
|
||||||
|
|
||||||
|
def set(self, key: str, value: T, ttl_seconds: int) -> None:
|
||||||
|
self._store[key] = CacheItem(value=value, expires_at=time.time() + ttl_seconds)
|
||||||
29
app/services/headers.py
Normal file
29
app/services/headers.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from typing import Mapping
|
||||||
|
|
||||||
|
from app.models import SubscriptionUserInfo
|
||||||
|
|
||||||
|
_SUBSCRIPTION_FIELDS = re.compile(r"(upload|download|total|expire)=(\d+)")
|
||||||
|
|
||||||
|
|
||||||
|
def get_header_case_insensitive(headers: Mapping[str, str], name: str) -> str | None:
|
||||||
|
target = name.lower()
|
||||||
|
for key, value in headers.items():
|
||||||
|
if key.lower() == target:
|
||||||
|
return value
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def parse_subscription_userinfo(headers: Mapping[str, str]) -> SubscriptionUserInfo | None:
    """Parse an upstream Subscription-Userinfo header into a model.

    Returns None when the header is missing, or when it contains none of
    the known upload/download/total/expire integer fields.
    """
    raw = get_header_case_insensitive(headers, "Subscription-Userinfo")
    if not raw:
        return None

    # Later duplicates of a field overwrite earlier ones, mirroring a
    # sequential-assignment loop over the regex matches.
    fields = {key: int(number) for key, number in _SUBSCRIPTION_FIELDS.findall(raw)}
    parsed = SubscriptionUserInfo(**fields)
    if parsed.is_empty():
        return None
    return parsed
|
||||||
27
app/services/loader.py
Normal file
27
app/services/loader.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
from app.models import AppConfig
|
||||||
|
|
||||||
|
_ENV_PATTERN = re.compile(r"\$\{([A-Z0-9_]+)\}")
|
||||||
|
|
||||||
|
|
||||||
|
def _expand_env(value):
|
||||||
|
if isinstance(value, str):
|
||||||
|
return _ENV_PATTERN.sub(lambda m: os.getenv(m.group(1), ""), value)
|
||||||
|
if isinstance(value, list):
|
||||||
|
return [_expand_env(v) for v in value]
|
||||||
|
if isinstance(value, dict):
|
||||||
|
return {k: _expand_env(v) for k, v in value.items()}
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
def load_app_config(path: Path) -> AppConfig:
    """Load sources.yaml, expand ${ENV_VAR} placeholders, and validate.

    An empty YAML document is treated as {} before validation, so missing
    required AppConfig fields raise a clear validation error rather than
    a TypeError on None.
    """
    raw = yaml.safe_load(path.read_text(encoding="utf-8")) or {}
    expanded = _expand_env(raw)
    return AppConfig.model_validate(expanded)
|
||||||
275
app/services/profiles.py
Normal file
275
app/services/profiles.py
Normal file
@@ -0,0 +1,275 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
from app.models import AppConfig, ClientConfig, SourceSnapshot
|
||||||
|
from app.services.rules import build_inline_rules, build_rule_provider_entries, build_rule_set_references
|
||||||
|
|
||||||
|
|
||||||
|
def dump_yaml(data: dict[str, Any]) -> str:
    """Serialize *data* to YAML, preserving key order and non-ASCII text."""
    return yaml.safe_dump(data, allow_unicode=True, sort_keys=False, default_flow_style=False)
|
||||||
|
|
||||||
|
|
||||||
|
def build_thin_profile(
    *,
    client_type: str,
    app_config: AppConfig,
    client: ClientConfig,
    selected_source_names: list[str],
    base_url: str,
    public_path: str,
) -> dict[str, Any]:
    """Build a "thin" client profile that references remote proxy providers.

    Instead of embedding proxies inline, each selected source is pointed at
    this service's ``/providers/<name>.yaml`` endpoint so the client refreshes
    nodes itself.  Dict insertion order here is deliberate: the profile is
    dumped with ``sort_keys=False``, so statement order is the key order in
    the emitted YAML.

    client_type is "mihomo" or anything else (treated as Stash-style, which
    uses a simpler provider schema and no port settings).
    """
    # Base options shared by every client type.
    profile: dict[str, Any] = {
        "mode": client.mode,
        "ipv6": client.ipv6,
    }
    if client.log_level:
        profile["log-level"] = client.log_level
    # Port/LAN settings only apply to mihomo; other clients manage these themselves.
    if client_type == "mihomo":
        if client.mixed_port is not None:
            profile["mixed-port"] = client.mixed_port
        if client.socks_port is not None:
            profile["socks-port"] = client.socks_port
        profile["allow-lan"] = client.allow_lan
    # One proxy-provider entry per selected source, pointing back at this service.
    proxy_providers: dict[str, dict[str, Any]] = {}
    for name in selected_source_names:
        if client_type == "mihomo":
            # mihomo needs an explicit type/path and supports health checks.
            proxy_providers[name] = {
                "type": "http",
                "url": f"{base_url}/{public_path}/providers/{name}.yaml",
                "path": f"./providers/{name}.yaml",
                "interval": client.provider_interval,
                "health-check": {
                    "enable": True,
                    "url": str(client.test_url),
                    "interval": client.test_interval,
                },
            }
        else:
            # Stash-style providers only take a url + refresh interval.
            proxy_providers[name] = {
                "url": f"{base_url}/{public_path}/providers/{name}.yaml",
                "interval": client.provider_interval,
            }
    profile["proxy-providers"] = proxy_providers
    profile["proxy-groups"] = _build_thin_groups(client_type, app_config, client, selected_source_names)
    profile["rule-providers"] = build_rule_provider_entries(app_config, client, base_url, public_path)
    profile["rules"] = build_rule_set_references(app_config, client)
    return profile
|
||||||
|
|
||||||
|
|
||||||
|
def build_bundle_profile(
    *,
    client_type: str,
    app_config: AppConfig,
    client: ClientConfig,
    snapshots: list[SourceSnapshot],
) -> dict[str, Any]:
    """Build a self-contained profile with every proxy embedded inline.

    Unlike the thin profile, nothing references this service at runtime:
    proxies from all snapshots are inlined (deduplicating display names with
    " #2", " #3", … suffixes) and rules are expanded inline.  Dict insertion
    order sets the emitted YAML key order (the dump uses ``sort_keys=False``).
    """
    # Base options shared by every client type.
    profile: dict[str, Any] = {
        "mode": client.mode,
        "ipv6": client.ipv6,
    }
    if client.log_level:
        profile["log-level"] = client.log_level
    # Port/LAN settings only apply to mihomo.
    if client_type == "mihomo":
        if client.mixed_port is not None:
            profile["mixed-port"] = client.mixed_port
        if client.socks_port is not None:
            profile["socks-port"] = client.socks_port
        profile["allow-lan"] = client.allow_lan

    proxies: list[dict[str, Any]] = []
    # Final (deduplicated) proxy names per source, for the group builder below.
    source_proxy_names: dict[str, list[str]] = {}
    seen: set[str] = set()
    for snapshot in snapshots:
        names: list[str] = []
        for proxy in snapshot.document.proxies:
            candidate = dict(proxy)  # copy: never mutate the cached document
            name = str(candidate.get("name", "")).strip()
            if not name:
                # Nameless entries are unusable in groups; drop them.
                continue
            original = name
            index = 2
            # Bump the suffix until the name is unique across all sources.
            while name in seen:
                name = f"{original} #{index}"
                index += 1
            seen.add(name)
            candidate["name"] = name
            proxies.append(candidate)
            names.append(name)
        source_proxy_names[snapshot.name] = names

    profile["proxies"] = proxies
    profile["proxy-groups"] = _build_bundle_groups(app_config, client, snapshots, source_proxy_names)
    profile["rules"] = build_inline_rules(app_config, client)
    return profile
|
||||||
|
|
||||||
|
|
||||||
|
def _build_thin_groups(client_type: str, app_config: AppConfig, client: ClientConfig, selected_source_names: list[str]) -> list[dict[str, Any]]:
|
||||||
|
groups: list[dict[str, Any]] = []
|
||||||
|
source_auto_names: list[str] = []
|
||||||
|
|
||||||
|
for source_name in selected_source_names:
|
||||||
|
display_name = app_config.sources[source_name].display_name or source_name
|
||||||
|
group_name = f"{display_name} 自动"
|
||||||
|
source_auto_names.append(group_name)
|
||||||
|
groups.append(
|
||||||
|
{
|
||||||
|
"name": group_name,
|
||||||
|
"type": "url-test",
|
||||||
|
"url": str(client.test_url),
|
||||||
|
"interval": client.test_interval,
|
||||||
|
"use": [source_name],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if client_type == "mihomo":
|
||||||
|
mixed_auto = {
|
||||||
|
"name": client.mixed_auto_policy,
|
||||||
|
"type": "url-test",
|
||||||
|
"url": str(client.test_url),
|
||||||
|
"interval": client.test_interval,
|
||||||
|
"include-all-providers": True,
|
||||||
|
}
|
||||||
|
manual = {
|
||||||
|
"name": client.manual_policy,
|
||||||
|
"type": "select",
|
||||||
|
"proxies": [client.direct_policy],
|
||||||
|
"include-all-providers": True,
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
mixed_auto = {
|
||||||
|
"name": client.mixed_auto_policy,
|
||||||
|
"type": "url-test",
|
||||||
|
"url": str(client.test_url),
|
||||||
|
"interval": client.test_interval,
|
||||||
|
"include-all": True,
|
||||||
|
}
|
||||||
|
manual = {
|
||||||
|
"name": client.manual_policy,
|
||||||
|
"type": "select",
|
||||||
|
"proxies": [client.direct_policy],
|
||||||
|
"include-all": True,
|
||||||
|
}
|
||||||
|
|
||||||
|
groups.append(mixed_auto)
|
||||||
|
|
||||||
|
region_names: list[str] = []
|
||||||
|
for region in app_config.regions.values():
|
||||||
|
group = {
|
||||||
|
"name": region.name,
|
||||||
|
"type": "url-test",
|
||||||
|
"url": str(client.test_url),
|
||||||
|
"interval": client.test_interval,
|
||||||
|
"filter": region.filter,
|
||||||
|
"tolerance": region.tolerance,
|
||||||
|
}
|
||||||
|
if client_type == "mihomo":
|
||||||
|
group["include-all-providers"] = True
|
||||||
|
else:
|
||||||
|
group["include-all"] = True
|
||||||
|
groups.append(group)
|
||||||
|
region_names.append(region.name)
|
||||||
|
|
||||||
|
groups.append(
|
||||||
|
{
|
||||||
|
"name": client.source_policy,
|
||||||
|
"type": "select",
|
||||||
|
"proxies": [client.mixed_auto_policy, *source_auto_names, client.direct_policy],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
groups.append(manual)
|
||||||
|
groups.append(
|
||||||
|
{
|
||||||
|
"name": client.main_policy,
|
||||||
|
"type": "select",
|
||||||
|
"proxies": [
|
||||||
|
client.source_policy,
|
||||||
|
client.mixed_auto_policy,
|
||||||
|
*region_names,
|
||||||
|
client.manual_policy,
|
||||||
|
client.direct_policy,
|
||||||
|
],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return groups
|
||||||
|
|
||||||
|
|
||||||
|
def _build_bundle_groups(
    app_config: AppConfig,
    client: ClientConfig,
    snapshots: list[SourceSnapshot],
    source_proxy_names: dict[str, list[str]],
) -> list[dict[str, Any]]:
    """Build the proxy-groups list for a bundle (inline-proxies) profile.

    Mirrors _build_thin_groups but lists concrete proxy names instead of
    provider references.  Any group that would otherwise be empty falls
    back to [client.direct_policy], since clients reject empty groups.
    """
    groups: list[dict[str, Any]] = []
    source_auto_names: list[str] = []
    # Flattened, already-deduplicated names across all sources, in source order.
    all_proxy_names = [name for names in source_proxy_names.values() for name in names]

    # One auto group per source, over that source's own nodes.
    for snapshot in snapshots:
        group_name = f"{snapshot.display_name} 自动"
        source_auto_names.append(group_name)
        groups.append(
            {
                "name": group_name,
                "type": "url-test",
                "url": str(client.test_url),
                "interval": client.test_interval,
                "proxies": source_proxy_names.get(snapshot.name) or [client.direct_policy],
            }
        )

    # Auto-select over every node from every source.
    groups.append(
        {
            "name": client.mixed_auto_policy,
            "type": "url-test",
            "url": str(client.test_url),
            "interval": client.test_interval,
            "proxies": all_proxy_names or [client.direct_policy],
        }
    )

    # One auto group per region, membership decided by regex match on the name.
    region_names: list[str] = []
    for region in app_config.regions.values():
        matched = [name for name in all_proxy_names if re.search(region.filter, name)]
        groups.append(
            {
                "name": region.name,
                "type": "url-test",
                "url": str(client.test_url),
                "interval": client.test_interval,
                "tolerance": region.tolerance,
                "proxies": matched or [client.direct_policy],
            }
        )
        region_names.append(region.name)

    # Source-level selector: mixed auto, per-source autos, then DIRECT.
    groups.append(
        {
            "name": client.source_policy,
            "type": "select",
            "proxies": [client.mixed_auto_policy, *source_auto_names, client.direct_policy],
        }
    )
    # Manual picker over every concrete node.
    groups.append(
        {
            "name": client.manual_policy,
            "type": "select",
            "proxies": [*all_proxy_names, client.direct_policy] if all_proxy_names else [client.direct_policy],
        }
    )
    # Top-level selector that the generated rules route to.
    groups.append(
        {
            "name": client.main_policy,
            "type": "select",
            "proxies": [
                client.source_policy,
                client.mixed_auto_policy,
                *region_names,
                client.manual_policy,
                client.direct_policy,
            ],
        }
    )
    return groups
|
||||||
80
app/services/rules.py
Normal file
80
app/services/rules.py
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
from app.config import get_settings
|
||||||
|
from app.models import AppConfig, ClientConfig, RuleConfig
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_policy(policy: str, client: ClientConfig) -> str:
    """Substitute policy placeholders with the client's configured group names.

    Both the spaced ("{{ main_policy }}") and unspaced ("{{main_policy}}")
    forms of the main/direct placeholders are replaced; all other text is
    returned unchanged.
    """
    replacements = (
        ("{{ main_policy }}", client.main_policy),
        ("{{main_policy}}", client.main_policy),
        ("{{ direct_policy }}", client.direct_policy),
        ("{{direct_policy}}", client.direct_policy),
    )
    resolved = policy
    for placeholder, target in replacements:
        resolved = resolved.replace(placeholder, target)
    return resolved
|
||||||
|
|
||||||
|
|
||||||
|
def load_rule_text(path: Path) -> str:
    """Return the raw UTF-8 text of a rule file."""
    with path.open(encoding="utf-8") as handle:
        return handle.read()
|
||||||
|
|
||||||
|
|
||||||
|
def load_rule_payload(path: Path) -> list[str]:
    """Load rule entries from a file as a list of non-empty strings.

    YAML files (.yaml/.yml) must carry a top-level ``payload`` list, else
    ValueError is raised.  Any other extension is read as plain text,
    skipping blank lines and ``#`` comments.  Entries are whitespace-trimmed.
    """
    if path.suffix.lower() not in {".yaml", ".yml"}:
        # Plain-text rule list: one entry per line, '#' starts a comment line.
        entries: list[str] = []
        for raw_line in path.read_text(encoding="utf-8").splitlines():
            text = raw_line.strip()
            if text and not text.startswith("#"):
                entries.append(text)
        return entries

    data = yaml.safe_load(path.read_text(encoding="utf-8")) or {}
    payload = data.get("payload", [])
    if not isinstance(payload, list):
        raise ValueError(f"Rule file {path.name} must contain a list field named 'payload'")
    return [str(item).strip() for item in payload if str(item).strip()]
|
||||||
|
|
||||||
|
|
||||||
|
def build_rule_provider_entries(app_config: AppConfig, client: ClientConfig, base_url: str, public_path: str):
    """Build the rule-providers mapping for a thin profile.

    Each configured rule becomes an HTTP rule-provider entry pointing at
    this service's ``/rules/<name>.yaml`` endpoint.  ``client`` is accepted
    for signature symmetry with the other builders but is not used here.
    """
    return {
        name: {
            "behavior": rule.behavior,
            "format": rule.format,
            "url": f"{base_url}/{public_path}/rules/{name}.yaml",
            "interval": rule.interval,
        }
        for name, rule in app_config.rules.items()
    }
|
||||||
|
|
||||||
|
|
||||||
|
def build_rule_set_references(app_config: AppConfig, client: ClientConfig) -> list[str]:
    """Render RULE-SET references (plus the final MATCH) for a thin profile.

    Each configured rule references its provider by name and routes to its
    resolved policy; rules flagged ``no_resolve`` get the ``no-resolve``
    modifier.  The catch-all MATCH always targets the client's main policy.
    """
    refs: list[str] = []
    for name, rule in app_config.rules.items():
        parts = ["RULE-SET", name, resolve_policy(rule.policy, client)]
        if rule.no_resolve:
            parts.append("no-resolve")
        refs.append(",".join(parts))
    refs.append(f"MATCH,{client.main_policy}")
    return refs
|
||||||
|
|
||||||
|
|
||||||
|
def build_inline_rules(app_config: AppConfig, client: ClientConfig) -> list[str]:
    """Expand every configured rule file into inline rule lines.

    Used for bundle profiles, where rules are embedded instead of served as
    rule-providers.  Each payload entry becomes ``"<entry>,<policy>"`` (plus
    ``",no-resolve"`` when configured), followed by a final MATCH rule
    targeting the client's main policy.

    Raises FileNotFoundError when a rule file is absent — or when it
    resolves outside the configured rules directory (path-traversal guard;
    the message intentionally does not distinguish the two cases).
    """
    settings = get_settings()
    lines: list[str] = []
    for rule in app_config.rules.values():
        path = (settings.rules_dir / rule.file).resolve()
        # Reject missing files and any path that escapes rules_dir
        # (e.g. "../" components or an absolute path in rule.file).
        if not path.is_file() or settings.rules_dir.resolve() not in path.parents:
            raise FileNotFoundError(f"Rule file missing: {rule.file}")
        target = resolve_policy(rule.policy, client)
        for payload_line in load_rule_payload(path):
            line = f"{payload_line},{target}"
            if rule.no_resolve:
                line += ",no-resolve"
            lines.append(line)
    lines.append(f"MATCH,{client.main_policy}")
    return lines
|
||||||
177
app/services/subscriptions.py
Normal file
177
app/services/subscriptions.py
Normal file
@@ -0,0 +1,177 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from typing import Any, Iterable
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
from app.config import get_settings
|
||||||
|
from app.models import FetchResult, ProviderDocument, SourceConfig, SourceSnapshot
|
||||||
|
from app.services.cache import TTLCache
|
||||||
|
from app.services.headers import parse_subscription_userinfo
|
||||||
|
|
||||||
|
|
||||||
|
# Process-wide TTL caches keyed by source name: raw upstream responses,
# parsed provider documents, and full snapshots (document + headers + quota).
_fetch_cache: TTLCache[FetchResult] = TTLCache()
_provider_cache: TTLCache[ProviderDocument] = TTLCache()
_snapshot_cache: TTLCache[SourceSnapshot] = TTLCache()
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_source(name: str, source: SourceConfig) -> FetchResult:
    """Fetch a source's raw subscription text, caching by source name.

    A cache hit returns immediately.  Otherwise the upstream URL is fetched
    (following redirects) with the default User-Agent, which any per-source
    headers may override, and the response text + headers are cached for the
    source's TTL (falling back to the global TTL).  HTTP errors propagate as
    httpx.HTTPStatusError.
    """
    settings = get_settings()
    ttl = source.cache_ttl_seconds or settings.cache_ttl_seconds
    cached = _fetch_cache.get(name)
    if cached is not None:
        return cached

    # Per-source headers win over the default User-Agent.
    request_headers = {"User-Agent": settings.default_user_agent, **source.headers}

    async with httpx.AsyncClient(
        timeout=settings.request_timeout_seconds, follow_redirects=True
    ) as client:
        response = await client.get(source.url, headers=request_headers)
        response.raise_for_status()

    result = FetchResult(text=response.text, headers=dict(response.headers))
    _fetch_cache.set(name, result, ttl)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
async def build_provider_document(name: str, source: SourceConfig) -> ProviderDocument:
    """Parse and transform a source into a cached provider document.

    Returns the cached document when fresh; otherwise fetches the upstream
    text (itself cached), parses its Clash-YAML proxies, applies the
    source's filtering/renaming transforms, and caches the result for the
    source's TTL (falling back to the global TTL).

    Raises ValueError for unsupported source kinds — validated BEFORE the
    network fetch, so a misconfigured source fails fast instead of wasting
    an HTTP request it can never use (the original fetched first).
    """
    settings = get_settings()
    ttl = source.cache_ttl_seconds or settings.cache_ttl_seconds
    cache_key = f"provider:{name}"
    cached = _provider_cache.get(cache_key)
    if cached is not None:
        return cached

    # Only Clash-YAML upstreams are supported; reject before any network I/O.
    if source.kind != "clash_yaml":
        raise ValueError(f"Unsupported source kind: {source.kind}")

    fetched = await fetch_source(name, source)

    proxies = parse_clash_yaml_proxies(fetched.text)
    proxies = transform_proxies(proxies, source, settings.max_proxy_name_length)

    document = ProviderDocument(proxies=proxies)
    _provider_cache.set(cache_key, document, ttl)
    return document
|
||||||
|
|
||||||
|
|
||||||
|
async def build_source_snapshot(name: str, source: SourceConfig) -> SourceSnapshot:
    """Build (and cache) a full snapshot for one source.

    Combines the raw fetch headers (for quota parsing) with the parsed
    provider document.  Both fetch_source and build_provider_document are
    themselves cached, so the double call performs at most one HTTP request.
    """
    settings = get_settings()
    ttl = source.cache_ttl_seconds or settings.cache_ttl_seconds
    cache_key = f"snapshot:{name}"
    cached = _snapshot_cache.get(cache_key)
    if cached is not None:
        return cached

    fetched = await fetch_source(name, source)
    document = await build_provider_document(name, source)
    snapshot = SourceSnapshot(
        name=name,
        display_name=source.display_name or name,
        document=document,
        headers=fetched.headers,
        # Quota parsed from the upstream Subscription-Userinfo response header.
        quota=parse_subscription_userinfo(fetched.headers),
    )
    _snapshot_cache.set(cache_key, snapshot, ttl)
    return snapshot
|
||||||
|
|
||||||
|
|
||||||
|
async def build_source_snapshots(source_items: Iterable[tuple[str, SourceConfig]]) -> list[SourceSnapshot]:
    """Build snapshots for each (name, source) pair sequentially, in order."""
    return [await build_source_snapshot(name, source) for name, source in source_items]
|
||||||
|
|
||||||
|
|
||||||
|
async def build_merged_provider_document(source_items: Iterable[tuple[str, SourceConfig]]) -> ProviderDocument:
    """Merge every source's proxies into a single provider document.

    Proxy order follows source order.  Duplicate display names across
    sources are disambiguated by appending " #2", " #3", … so the merged
    output never contains two proxies with the same name.  Nameless
    entries are dropped.
    """
    snapshots = await build_source_snapshots(source_items)
    proxies: list[dict[str, Any]] = []
    seen: set[str] = set()  # every name emitted so far

    for snapshot in snapshots:
        for proxy in snapshot.document.proxies:
            candidate = dict(proxy)  # copy: never mutate the cached document
            name = str(candidate.get("name", "")).strip()
            if not name:
                continue
            original = name
            index = 2
            # Bump the suffix until the name is unique.
            while name in seen:
                name = f"{original} #{index}"
                index += 1
            candidate["name"] = name
            seen.add(name)
            proxies.append(candidate)

    return ProviderDocument(proxies=proxies)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_first_quota(source_items: Iterable[tuple[str, SourceConfig]]):
    """Return the quota of the first configured source, or None when empty.

    Used to surface a single Subscription-Userinfo value for merged output.
    """
    for name, source in source_items:
        snapshot = await build_source_snapshot(name, source)
        return snapshot.quota
    return None
|
||||||
|
|
||||||
|
|
||||||
|
def parse_clash_yaml_proxies(text: str) -> list[dict[str, Any]]:
    """Extract proxy entries from upstream Clash YAML text.

    The document must be a mapping with a top-level ``proxies`` list;
    ValueError is raised otherwise.  Entries that are not mappings, or
    that lack a truthy name/type, are silently dropped.
    """
    data = yaml.safe_load(text)
    if not isinstance(data, dict):
        raise ValueError("Upstream YAML must be a mapping with a top-level 'proxies' field")

    proxies = data.get("proxies")
    if not isinstance(proxies, list):
        raise ValueError("Upstream YAML must contain a list field named 'proxies'")

    return [
        item
        for item in proxies
        if isinstance(item, dict) and item.get("name") and item.get("type")
    ]
|
||||||
|
|
||||||
|
|
||||||
|
def transform_proxies(
    proxies: list[dict[str, Any]], source: SourceConfig, max_proxy_name_length: int
) -> list[dict[str, Any]]:
    """Filter and rename a source's proxies.

    Applies the source's include/exclude name regexes (searched, not
    matched), prepends/appends the configured prefix/suffix, truncates to
    ``max_proxy_name_length``, and disambiguates duplicate names with
    " #2", " #3", … — re-checking after each suffix, so generated names can
    never collide (the original count-based scheme produced duplicates when
    an upstream name already ended in " #2").  Input dicts are copied,
    never mutated; nameless entries are dropped.
    """
    include = re.compile(source.include_regex) if source.include_regex else None
    exclude = re.compile(source.exclude_regex) if source.exclude_regex else None

    transformed: list[dict[str, Any]] = []
    used: set[str] = set()  # every final name emitted so far

    for proxy in proxies:
        name = str(proxy.get("name", "")).strip()
        if not name:
            continue
        if include and not include.search(name):
            continue
        if exclude and exclude.search(name):
            continue

        base = f"{source.prefix}{name}{source.suffix}".strip()
        if len(base) > max_proxy_name_length:
            base = base[:max_proxy_name_length].rstrip()

        # Bump the index until the candidate name is genuinely unused.
        new_name = base
        index = 2
        while new_name in used:
            new_name = f"{base} #{index}"
            index += 1
        used.add(new_name)

        new_proxy = dict(proxy)
        new_proxy["name"] = new_name
        transformed.append(new_proxy)

    return transformed
|
||||||
|
|
||||||
|
|
||||||
|
def dump_provider_yaml(document: ProviderDocument) -> str:
    """Serialize a provider document as Clash provider YAML.

    Output is a single top-level ``proxies`` list; unicode is preserved,
    key order is untouched, and block style is used throughout.
    """
    payload = {"proxies": document.proxies}
    return yaml.safe_dump(payload, allow_unicode=True, sort_keys=False, default_flow_style=False)
|
||||||
4
config/rules/cn-ip.yaml
Normal file
4
config/rules/cn-ip.yaml
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
payload:
|
||||||
|
- 10.0.0.0/8
|
||||||
|
- 172.16.0.0/12
|
||||||
|
- 192.168.0.0/16
|
||||||
5
config/rules/direct.yaml
Normal file
5
config/rules/direct.yaml
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
payload:
|
||||||
|
- DOMAIN-SUFFIX,lan
|
||||||
|
- DOMAIN-SUFFIX,local
|
||||||
|
- DOMAIN-SUFFIX,apple.com
|
||||||
|
- DOMAIN-SUFFIX,icloud.com
|
||||||
5
config/rules/proxy.yaml
Normal file
5
config/rules/proxy.yaml
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
payload:
|
||||||
|
- DOMAIN-SUFFIX,google.com
|
||||||
|
- DOMAIN-SUFFIX,youtube.com
|
||||||
|
- DOMAIN-SUFFIX,github.com
|
||||||
|
- DOMAIN-SUFFIX,openai.com
|
||||||
4
config/rules/reject.yaml
Normal file
4
config/rules/reject.yaml
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
payload:
|
||||||
|
- DOMAIN-SUFFIX,doubleclick.net
|
||||||
|
- DOMAIN-SUFFIX,googlesyndication.com
|
||||||
|
- DOMAIN-SUFFIX,adservice.google.com
|
||||||
102
config/sources.yaml
Normal file
102
config/sources.yaml
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
public_path: ${PUBLIC_PATH}
|
||||||
|
|
||||||
|
sources:
|
||||||
|
airport-a:
|
||||||
|
enabled: true
|
||||||
|
display_name: 蛋挞云
|
||||||
|
kind: clash_yaml
|
||||||
|
url: ${AIRPORT_A_URL}
|
||||||
|
prefix: "[A] "
|
||||||
|
include_regex: ""
|
||||||
|
exclude_regex: ""
|
||||||
|
|
||||||
|
airport-b:
|
||||||
|
enabled: true
|
||||||
|
display_name: AnyTLS
|
||||||
|
kind: clash_yaml
|
||||||
|
url: ${AIRPORT_B_URL}
|
||||||
|
prefix: "[B] "
|
||||||
|
include_regex: ""
|
||||||
|
exclude_regex: ""
|
||||||
|
|
||||||
|
regions:
|
||||||
|
hk:
|
||||||
|
name: "🇭🇰 香港自动"
|
||||||
|
filter: "(?i)(香港|hk|hong kong)"
|
||||||
|
tolerance: 50
|
||||||
|
sg:
|
||||||
|
name: "🇸🇬 新加坡自动"
|
||||||
|
filter: "(?i)(新加坡|狮城|sg|singapore)"
|
||||||
|
tolerance: 50
|
||||||
|
jp:
|
||||||
|
name: "🇯🇵 日本自动"
|
||||||
|
filter: "(?i)(日本|jp|japan)"
|
||||||
|
tolerance: 50
|
||||||
|
us:
|
||||||
|
name: "🇺🇸 美国自动"
|
||||||
|
filter: "(?i)(美国|美國|us|united states)"
|
||||||
|
tolerance: 150
|
||||||
|
|
||||||
|
rules:
|
||||||
|
reject:
|
||||||
|
file: reject.yaml
|
||||||
|
behavior: domain
|
||||||
|
format: yaml
|
||||||
|
interval: 86400
|
||||||
|
policy: REJECT
|
||||||
|
|
||||||
|
direct:
|
||||||
|
file: direct.yaml
|
||||||
|
behavior: domain
|
||||||
|
format: yaml
|
||||||
|
interval: 86400
|
||||||
|
policy: "{{ direct_policy }}"
|
||||||
|
|
||||||
|
proxy:
|
||||||
|
file: proxy.yaml
|
||||||
|
behavior: domain
|
||||||
|
format: yaml
|
||||||
|
interval: 86400
|
||||||
|
policy: "{{ main_policy }}"
|
||||||
|
|
||||||
|
cn-ip:
|
||||||
|
file: cn-ip.yaml
|
||||||
|
behavior: ipcidr
|
||||||
|
format: yaml
|
||||||
|
interval: 86400
|
||||||
|
policy: "{{ direct_policy }}"
|
||||||
|
no_resolve: true
|
||||||
|
|
||||||
|
clients:
|
||||||
|
mihomo:
|
||||||
|
title: HomeLab Mihomo
|
||||||
|
provider_interval: 21600
|
||||||
|
rule_interval: 86400
|
||||||
|
test_url: https://www.gstatic.com/generate_204
|
||||||
|
test_interval: 300
|
||||||
|
main_policy: 节点选择
|
||||||
|
source_policy: ☁️ 机场选择
|
||||||
|
mixed_auto_policy: ♻️ 自动选择
|
||||||
|
manual_policy: 🚀 手动切换
|
||||||
|
direct_policy: DIRECT
|
||||||
|
mode: rule
|
||||||
|
allow_lan: true
|
||||||
|
ipv6: true
|
||||||
|
mixed_port: 7890
|
||||||
|
socks_port: 7891
|
||||||
|
log_level: info
|
||||||
|
|
||||||
|
stash:
|
||||||
|
title: HomeLab Stash
|
||||||
|
provider_interval: 21600
|
||||||
|
rule_interval: 86400
|
||||||
|
test_url: https://www.gstatic.com/generate_204
|
||||||
|
test_interval: 300
|
||||||
|
main_policy: 节点选择
|
||||||
|
source_policy: ☁️ 机场选择
|
||||||
|
mixed_auto_policy: ♻️ 自动选择
|
||||||
|
manual_policy: 🚀 手动切换
|
||||||
|
direct_policy: DIRECT
|
||||||
|
mode: rule
|
||||||
|
ipv6: true
|
||||||
|
log_level: info
|
||||||
11
docker-compose.yaml
Normal file
11
docker-compose.yaml
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
services:
|
||||||
|
sub-provider:
|
||||||
|
build: .
|
||||||
|
container_name: sub-provider
|
||||||
|
restart: unless-stopped
|
||||||
|
ports:
|
||||||
|
- "18080:18080"
|
||||||
|
env_file:
|
||||||
|
- .env
|
||||||
|
volumes:
|
||||||
|
- ./config:/app/config:ro
|
||||||
5
requirements.txt
Normal file
5
requirements.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
fastapi>=0.111,<1.0
|
||||||
|
uvicorn[standard]>=0.30,<1.0
|
||||||
|
httpx>=0.27,<1.0
|
||||||
|
PyYAML>=6.0,<7.0
|
||||||
|
pydantic-settings>=2.3,<3.0
|
||||||
Reference in New Issue
Block a user