refactor: remove legacy ProxySource layer
Delete source.py, ProxySourceConfig, and Config.proxy_source. ProxyPool fully supersedes ProxySource. The YAML backward-compat conversion in load_config is preserved so old configs still work. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -27,17 +27,6 @@ class ChainHop:
|
||||
return f"{self.proto}://{auth}{self.host}:{self.port}"
|
||||
|
||||
|
||||
@dataclass
class ProxySourceConfig:
    """Settings for the legacy dynamic proxy-source HTTP API.

    Retained only for backward compatibility with older YAML configs;
    the pool-based configuration supersedes it.
    """

    # API endpoint to fetch proxies from; empty string disables the source.
    url: str = ""
    # Optional protocol filter forwarded as a query parameter.
    proto: str | None = None
    # Optional country filter forwarded as a query parameter.
    country: str | None = None
    # Maximum number of proxies to request per fetch; None/0 omits the param.
    limit: int | None = 1000
    # Seconds the cached proxy list stays fresh before a re-fetch.
    refresh: float = 300.0
|
||||
|
||||
|
||||
@dataclass
|
||||
class PoolSourceConfig:
|
||||
"""A single proxy source: HTTP API or text file."""
|
||||
@@ -77,7 +66,6 @@ class Config:
|
||||
max_connections: int = 256
|
||||
pool_size: int = 0
|
||||
pool_max_idle: float = 30.0
|
||||
proxy_source: ProxySourceConfig | None = None
|
||||
proxy_pool: ProxyPoolConfig | None = None
|
||||
config_file: str = ""
|
||||
|
||||
@@ -218,9 +206,5 @@ def load_config(path: str | Path) -> Config:
|
||||
sources=[PoolSourceConfig(url=url, proto=proto, country=country, limit=limit)],
|
||||
refresh=refresh,
|
||||
)
|
||||
# keep legacy field for source.py compat during transition
|
||||
config.proxy_source = ProxySourceConfig(
|
||||
url=url, proto=proto, country=country, limit=limit, refresh=refresh,
|
||||
)
|
||||
|
||||
return config
|
||||
|
||||
@@ -13,7 +13,6 @@ from .connpool import FirstHopPool
|
||||
from .metrics import Metrics
|
||||
from .pool import ProxyPool
|
||||
from .proto import ProtoError, Socks5Reply, build_chain, read_socks5_address
|
||||
from .source import ProxySource
|
||||
|
||||
logger = logging.getLogger("s5p")
|
||||
|
||||
@@ -60,7 +59,7 @@ async def _handle_client(
|
||||
client_reader: asyncio.StreamReader,
|
||||
client_writer: asyncio.StreamWriter,
|
||||
config: Config,
|
||||
proxy_pool: ProxyPool | ProxySource | None = None,
|
||||
proxy_pool: ProxyPool | None = None,
|
||||
metrics: Metrics | None = None,
|
||||
first_hop_pool: FirstHopPool | None = None,
|
||||
) -> None:
|
||||
@@ -123,7 +122,7 @@ async def _handle_client(
|
||||
break
|
||||
except (ProtoError, TimeoutError, ConnectionError, OSError) as e:
|
||||
last_err = e
|
||||
if pool_hop and isinstance(proxy_pool, ProxyPool):
|
||||
if pool_hop and proxy_pool:
|
||||
proxy_pool.report_failure(pool_hop)
|
||||
if metrics:
|
||||
metrics.retries += 1
|
||||
@@ -217,13 +216,9 @@ async def serve(config: Config) -> None:
|
||||
"""Start the SOCKS5 proxy server."""
|
||||
metrics = Metrics()
|
||||
|
||||
proxy_pool: ProxyPool | ProxySource | None = None
|
||||
proxy_pool: ProxyPool | None = None
|
||||
if config.proxy_pool and config.proxy_pool.sources:
|
||||
pool = ProxyPool(config.proxy_pool, config.chain, config.timeout)
|
||||
await pool.start()
|
||||
proxy_pool = pool
|
||||
elif config.proxy_source and config.proxy_source.url:
|
||||
proxy_pool = ProxySource(config.proxy_source)
|
||||
proxy_pool = ProxyPool(config.proxy_pool, config.chain, config.timeout)
|
||||
await proxy_pool.start()
|
||||
|
||||
hop_pool: FirstHopPool | None = None
|
||||
@@ -249,16 +244,13 @@ async def serve(config: Config) -> None:
|
||||
else:
|
||||
logger.info(" mode: direct (no chain)")
|
||||
|
||||
if isinstance(proxy_pool, ProxyPool):
|
||||
if proxy_pool:
|
||||
nsrc = len(config.proxy_pool.sources)
|
||||
logger.info(
|
||||
" pool: %d proxies, %d alive (from %d source%s)",
|
||||
proxy_pool.count, proxy_pool.alive_count, nsrc, "s" if nsrc != 1 else "",
|
||||
)
|
||||
logger.info(" retries: %d", config.retries)
|
||||
elif proxy_pool:
|
||||
logger.info(" proxy source: %s (%d proxies)", config.proxy_source.url, proxy_pool.count)
|
||||
logger.info(" retries: %d", config.retries)
|
||||
|
||||
loop = asyncio.get_running_loop()
|
||||
stop = loop.create_future()
|
||||
@@ -287,7 +279,7 @@ async def serve(config: Config) -> None:
|
||||
for h in root.handlers:
|
||||
h.setLevel(level)
|
||||
logging.getLogger("s5p").setLevel(level)
|
||||
if isinstance(proxy_pool, ProxyPool) and new.proxy_pool:
|
||||
if proxy_pool and new.proxy_pool:
|
||||
await proxy_pool.reload(new.proxy_pool)
|
||||
logger.info("reload: config reloaded")
|
||||
|
||||
@@ -297,7 +289,7 @@ async def serve(config: Config) -> None:
|
||||
loop.add_signal_handler(signal.SIGHUP, _on_sighup)
|
||||
|
||||
metrics_stop = asyncio.Event()
|
||||
pool_ref = proxy_pool if isinstance(proxy_pool, ProxyPool) else None
|
||||
pool_ref = proxy_pool
|
||||
metrics_task = asyncio.create_task(_metrics_logger(metrics, metrics_stop, pool_ref))
|
||||
|
||||
async with srv:
|
||||
@@ -305,7 +297,7 @@ async def serve(config: Config) -> None:
|
||||
logger.info("received %s, shutting down", signal.Signals(sig).name)
|
||||
if hop_pool:
|
||||
await hop_pool.stop()
|
||||
if isinstance(proxy_pool, ProxyPool):
|
||||
if proxy_pool:
|
||||
await proxy_pool.stop()
|
||||
shutdown_line = metrics.summary()
|
||||
if pool_ref:
|
||||
|
||||
@@ -1,77 +0,0 @@
|
||||
"""Dynamic proxy source -- fetches proxies from an HTTP API."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import random
|
||||
import time
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from .config import ChainHop, ProxySourceConfig, parse_api_proxies
|
||||
from .http import http_get_json
|
||||
|
||||
logger = logging.getLogger("s5p")
|
||||
|
||||
|
||||
class ProxySource:
    """Fetches and caches proxies from an HTTP API.

    Each ``get()`` returns a random proxy from the cache; when the cache
    is older than the configured ``refresh`` interval it is re-fetched
    first. A lock serializes concurrent refresh attempts.
    """

    def __init__(self, cfg: ProxySourceConfig) -> None:
        self._cfg = cfg
        self._cache: list[ChainHop] = []
        self._last_fetch: float = 0.0
        # Guards _cache/_last_fetch against concurrent refreshes.
        self._lock = asyncio.Lock()

    @property
    def count(self) -> int:
        """Number of proxies currently cached."""
        return len(self._cache)

    async def start(self) -> None:
        """Initial fetch. Call once before serving."""
        await self._refresh()

    async def get(self) -> ChainHop | None:
        """Return a random cached proxy, refreshing first if stale."""
        if time.monotonic() - self._last_fetch > self._cfg.refresh:
            await self._refresh()
        return random.choice(self._cache) if self._cache else None

    async def _refresh(self) -> None:
        """Re-fetch the proxy list; on failure keep the stale cache."""
        async with self._lock:
            try:
                fetched = await self._fetch()
            except Exception as e:
                # Best-effort: log and fall back to whatever we already have.
                logger.warning("proxy source: fetch failed: %s", e)
                if self._cache:
                    logger.info("proxy source: using stale cache (%d proxies)", len(self._cache))
                return
            self._cache = fetched
            self._last_fetch = time.monotonic()
            logger.info("proxy source: loaded %d proxies", len(fetched))

    async def _fetch(self) -> list[ChainHop]:
        """Build the API URL from config filters and fetch/parse the list."""
        cfg = self._cfg
        query: dict[str, str] = {}
        if cfg.limit:
            query["limit"] = str(cfg.limit)
        if cfg.proto:
            query["proto"] = cfg.proto
        if cfg.country:
            query["country"] = cfg.country

        endpoint = cfg.url
        if query:
            # Append with '&' when the configured URL already has a query string.
            joiner = "&" if "?" in endpoint else "?"
            endpoint = f"{endpoint}{joiner}{urlencode(query)}"

        payload = await http_get_json(endpoint)
        return parse_api_proxies(payload)
|
||||
Reference in New Issue
Block a user