Add proxy_pools: top-level config (dict of name -> pool config) so
listeners can draw from different proxy sources. Each pool has
independent sources, health testing, state persistence, and refresh
cycles.
- PoolSourceConfig gains mitm: bool|None for API ?mitm=0/1 filtering
- ListenerConfig gains pool_name for named pool assignment
- ProxyPool gains name param with prefixed log messages and
per-name state file derivation (pool-{name}.json)
- server.py replaces single proxy_pool with proxy_pools dict,
validates listener pool references at startup, per-listener closure
- API /pool merges all pools (with pool field on multi-pool entries),
/status and /config expose per-pool summaries
- Backward compat: singular proxy_pool: registers as "default"
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
806 lines
28 KiB
Python
"""Tests for the managed proxy pool."""
|
|
|
|
import asyncio
|
|
import ssl
|
|
import time
|
|
from unittest.mock import AsyncMock, MagicMock, patch
|
|
|
|
import pytest
|
|
|
|
from s5p.config import ChainHop, PoolSourceConfig, ProxyPoolConfig
|
|
from s5p.pool import ProxyEntry, ProxyPool
|
|
|
|
|
|
class TestProxyPoolName:
    """Test pool name and state path derivation."""

    def test_default_name(self):
        # Omitting name= yields the "default" pool with an unadorned log prefix.
        pool = ProxyPool(ProxyPoolConfig(sources=[]), [], timeout=10.0)
        assert pool.name == "default"
        assert pool._log_prefix == "pool"

    def test_named_pool(self):
        # A named pool embeds its name in the log prefix.
        pool = ProxyPool(ProxyPoolConfig(sources=[]), [], timeout=10.0, name="clean")
        assert pool.name == "clean"
        assert pool._log_prefix == "pool[clean]"

    def test_state_path_default(self):
        # Default pool persists to pool.json.
        pool = ProxyPool(ProxyPoolConfig(sources=[]), [], timeout=10.0)
        assert pool._state_path.name == "pool.json"

    def test_state_path_named(self):
        # Named pools derive a per-name state file: pool-{name}.json.
        pool = ProxyPool(ProxyPoolConfig(sources=[]), [], timeout=10.0, name="clean")
        assert pool._state_path.name == "pool-clean.json"

    def test_state_path_explicit_overrides_name(self):
        # An explicit state_file in config beats the name-derived path.
        cfg = ProxyPoolConfig(sources=[], state_file="/data/custom.json")
        pool = ProxyPool(cfg, [], timeout=10.0, name="clean")
        assert str(pool._state_path) == "/data/custom.json"
class TestProxyPoolMitmQuery:
    """Test mitm query parameter in API fetch.

    The URL built by ``_fetch_api`` must carry ``mitm=0`` for sources with
    mitm=False, ``mitm=1`` for mitm=True, and no ``mitm`` parameter at all
    when the filter is unset (None).
    """

    def _fetched_url(self, mitm):
        """Run _fetch_api with the HTTP layer mocked; return the URL requested.

        Shared driver for the three parameter cases — the originals
        triplicated this boilerplate and re-imported AsyncMock/patch
        locally, shadowing the module-level imports.
        """
        cfg = ProxyPoolConfig(sources=[])
        pool = ProxyPool(cfg, [], timeout=10.0)
        src = PoolSourceConfig(url="http://api:8081/proxies/all", mitm=mitm)

        async def run():
            with patch(
                "s5p.pool.http_get_json",
                new_callable=AsyncMock, return_value={"proxies": []},
            ) as mock:
                await pool._fetch_api(src)
                # First positional arg of the HTTP call is the final URL.
                return mock.call_args[0][0]

        return asyncio.run(run())

    def test_mitm_false(self):
        assert "mitm=0" in self._fetched_url(False)

    def test_mitm_true(self):
        assert "mitm=1" in self._fetched_url(True)

    def test_mitm_none_omitted(self):
        assert "mitm" not in self._fetched_url(None)
class TestProxyEntry:
    """Test ProxyEntry defaults."""

    def test_defaults(self):
        # A freshly-constructed entry starts dead with zeroed counters.
        entry = ProxyEntry(hop=ChainHop(proto="socks5", host="1.2.3.4", port=1080))
        assert entry.alive is False
        assert entry.fails == 0
        assert entry.tests == 0
class TestEffectiveChain:
    """Test chain_nodes round-robin in pool health tests."""

    def test_no_nodes_returns_original(self):
        # Without chain_nodes the configured chain is passed through as-is.
        static_chain = [ChainHop(proto="socks5", host="10.0.0.1", port=9050)]
        pool = ProxyPool(ProxyPoolConfig(sources=[]), static_chain, timeout=10.0)
        assert pool._effective_chain() == static_chain

    def test_round_robin_across_nodes(self):
        # With chain_nodes the first hop rotates a -> b -> c -> a -> ...
        nodes = [
            ChainHop(proto="socks5", host=f"node-{suffix}", port=9050)
            for suffix in ("a", "b", "c")
        ]
        pool = ProxyPool(
            ProxyPoolConfig(sources=[]),
            [ChainHop(proto="socks5", host="original", port=9050)],
            timeout=10.0,
            chain_nodes=nodes,
        )

        seen = [pool._effective_chain()[0].host for _ in range(6)]
        assert seen == ["node-a", "node-b", "node-c"] * 2

    def test_empty_chain_no_replacement(self):
        # An empty chain stays empty even when chain_nodes are configured.
        nodes = [ChainHop(proto="socks5", host="node-a", port=9050)]
        pool = ProxyPool(ProxyPoolConfig(sources=[]), [], timeout=10.0, chain_nodes=nodes)
        assert pool._effective_chain() == []
class TestProxyPoolMerge:
    """Test proxy deduplication and merge."""

    def test_merge_dedup(self):
        # Duplicate hops collapse to one entry keyed by proto://host:port.
        pool = ProxyPool(ProxyPoolConfig(sources=[]), [], timeout=10.0)
        incoming = [
            ChainHop(proto="socks5", host="1.2.3.4", port=1080),
            ChainHop(proto="socks5", host="1.2.3.4", port=1080),
            ChainHop(proto="socks5", host="5.6.7.8", port=1080),
        ]
        pool._merge(incoming)
        assert pool.count == 2

    def test_merge_updates_existing(self):
        pool = ProxyPool(ProxyPoolConfig(sources=[]), [], timeout=10.0)
        hop = ChainHop(proto="socks5", host="1.2.3.4", port=1080)
        pool._merge([hop])
        before = pool._proxies["socks5://1.2.3.4:1080"].last_seen

        # Re-merging the same hop must refresh last_seen, not add an entry.
        time.sleep(0.01)
        pool._merge([hop])
        assert pool._proxies["socks5://1.2.3.4:1080"].last_seen >= before
        assert pool.count == 1
class TestProxyPoolGet:
    """Test proxy selection.

    Uses the module-level ``asyncio`` import; the originals redundantly
    re-imported it inside each test method.
    """

    def test_get_empty(self):
        # An empty pool yields no proxy rather than raising.
        cfg = ProxyPoolConfig(sources=[])
        pool = ProxyPool(cfg, [], timeout=10.0)
        result = asyncio.run(pool.get())
        assert result is None

    def test_get_returns_alive(self):
        # A pool with one alive entry returns that entry's hop.
        cfg = ProxyPoolConfig(sources=[])
        pool = ProxyPool(cfg, [], timeout=10.0)
        hop = ChainHop(proto="socks5", host="1.2.3.4", port=1080)
        pool._proxies["socks5://1.2.3.4:1080"] = ProxyEntry(hop=hop, alive=True)
        pool._rebuild_alive()
        result = asyncio.run(pool.get())
        assert result is not None
        assert result.host == "1.2.3.4"
class TestProxyPoolWeight:
    """Test weighted proxy selection.

    Uses the module-level ``asyncio`` import; the originals redundantly
    re-imported it inside the sampling test.
    """

    def test_recent_proxy_preferred(self):
        """A recently-verified proxy should be sampled far more often."""
        from collections import Counter

        cfg = ProxyPoolConfig(sources=[])
        pool = ProxyPool(cfg, [], timeout=10.0)

        now = time.time()
        fresh = ChainHop(proto="socks5", host="10.0.0.1", port=1080)
        stale = ChainHop(proto="socks5", host="10.0.0.2", port=1080)
        pool._proxies["socks5://10.0.0.1:1080"] = ProxyEntry(
            hop=fresh, alive=True, last_ok=now,
        )
        pool._proxies["socks5://10.0.0.2:1080"] = ProxyEntry(
            hop=stale, alive=True, last_ok=now - 3600,
        )
        pool._rebuild_alive()

        counts: Counter[str] = Counter()
        for _ in range(1000):
            hop = asyncio.run(pool.get())
            counts[hop.host] += 1

        # The fresh proxy should win by at least 3:1 over the hour-stale one.
        assert counts["10.0.0.1"] > counts["10.0.0.2"] * 3

    def test_weight_values(self):
        """Weight decays with the age of the last successful test."""
        hop = ChainHop(proto="socks5", host="1.2.3.4", port=1080)
        now = 1000.0

        # just tested
        entry = ProxyEntry(hop=hop, last_ok=now)
        assert ProxyPool._weight(entry, now) == pytest.approx(1.0)

        # 5 minutes ago
        entry.last_ok = now - 300
        assert ProxyPool._weight(entry, now) == pytest.approx(0.5)

        # never tested
        entry.last_ok = 0
        assert ProxyPool._weight(entry, now) == pytest.approx(0.01)

    def test_weight_failure_penalty(self):
        """A recent failure suppresses weight, recovering over ~60s."""
        hop = ChainHop(proto="socks5", host="1.2.3.4", port=1080)
        now = 1000.0

        # healthy proxy: weight ~1.0
        entry = ProxyEntry(hop=hop, last_ok=now)
        assert ProxyPool._weight(entry, now) == pytest.approx(1.0)

        # just failed: weight drops to floor (fail_age=0 -> multiplier=0)
        entry.last_fail = now
        assert ProxyPool._weight(entry, now) == pytest.approx(0.01)

        # 30s after failure: base=1/(1+30/300)=0.909, penalty=30/60=0.5 -> ~0.45
        assert ProxyPool._weight(entry, now + 30) == pytest.approx(0.4545, abs=0.05)

        # 60s after failure: penalty=1.0, only base decay -> 1/(1+60/300)=0.833
        assert ProxyPool._weight(entry, now + 60) == pytest.approx(0.833, abs=0.05)

    def test_report_failure(self):
        """report_failure stamps last_fail on the matching tracked entry."""
        cfg = ProxyPoolConfig(sources=[])
        pool = ProxyPool(cfg, [], timeout=10.0)
        hop = ChainHop(proto="socks5", host="1.2.3.4", port=1080)
        pool._proxies["socks5://1.2.3.4:1080"] = ProxyEntry(
            hop=hop, alive=True, last_ok=time.time(),
        )

        assert pool._proxies["socks5://1.2.3.4:1080"].last_fail == 0.0
        pool.report_failure(hop)
        assert pool._proxies["socks5://1.2.3.4:1080"].last_fail > 0.0

    def test_report_failure_unknown_proxy(self):
        """Reporting a hop the pool does not track is a silent no-op."""
        cfg = ProxyPoolConfig(sources=[])
        pool = ProxyPool(cfg, [], timeout=10.0)
        hop = ChainHop(proto="socks5", host="9.9.9.9", port=1080)
        pool.report_failure(hop)  # should not raise
class TestDynamicConcurrency:
    """Test dynamic health test concurrency scaling.

    Effective concurrency is max(3, min(total // 10, configured cap)).
    The originals triplicated the Semaphore-capture machinery and
    re-imported asyncio/AsyncMock/patch locally; this version extracts
    one shared driver and relies on the module-level imports.
    """

    def _run_and_capture(self, *, test_concurrency, proxy_count):
        """Seed `proxy_count` dead proxies, run health tests with _test_proxy
        mocked out, and return the value passed to asyncio.Semaphore."""
        cfg = ProxyPoolConfig(sources=[], test_concurrency=test_concurrency)
        pool = ProxyPool(cfg, [], timeout=10.0)

        now = time.time()
        for i in range(proxy_count):
            # Unique synthetic host per index; keys only need to be distinct.
            host = f"10.{i // 65536}.{(i // 256) % 256}.{i % 256}"
            hop = ChainHop(proto="socks5", host=host, port=1080)
            pool._proxies[f"socks5://{host}:1080"] = ProxyEntry(
                hop=hop, alive=False, last_seen=now,
            )

        captured = {}
        original_semaphore = asyncio.Semaphore

        def capture_semaphore(value):
            captured["concurrency"] = value
            return original_semaphore(value)

        with (
            patch.object(pool, "_test_proxy", new_callable=AsyncMock, return_value=True),
            patch("s5p.pool.asyncio.Semaphore", side_effect=capture_semaphore),
        ):
            asyncio.run(pool._run_health_tests())

        return captured["concurrency"]

    def test_scales_to_ten_percent(self):
        # 100 proxies -> max(3, min(100 // 10, 25)) = 10
        assert self._run_and_capture(test_concurrency=25, proxy_count=100) == 10

    def test_minimum_of_three(self):
        # 5 proxies -> 5 // 10 = 0, but the floor is 3
        assert self._run_and_capture(test_concurrency=25, proxy_count=5) == 3

    def test_capped_by_config(self):
        # 1000 proxies -> 1000 // 10 = 100, capped at the configured 5
        assert self._run_and_capture(test_concurrency=5, proxy_count=1000) == 5
class TestProxyPoolHealthTests:
    """Test selective health testing.

    Uses the module-level asyncio/AsyncMock/patch imports; the originals
    redundantly re-imported them inside every test method.
    """

    def test_selective_keys(self):
        """Passing keys= restricts testing to just those proxies."""
        cfg = ProxyPoolConfig(sources=[])
        pool = ProxyPool(cfg, [], timeout=10.0)

        now = time.time()
        hop_a = ChainHop(proto="socks5", host="10.0.0.1", port=1080)
        hop_b = ChainHop(proto="socks5", host="10.0.0.2", port=1080)
        pool._proxies["socks5://10.0.0.1:1080"] = ProxyEntry(
            hop=hop_a, alive=False, last_seen=now,
        )
        pool._proxies["socks5://10.0.0.2:1080"] = ProxyEntry(
            hop=hop_b, alive=False, last_seen=now,
        )

        # only test proxy A
        with patch.object(pool, "_test_proxy", new_callable=AsyncMock, return_value=True) as mock:
            asyncio.run(pool._run_health_tests(keys=["socks5://10.0.0.1:1080"]))
            # should only have been called for proxy A
            assert mock.call_count == 1
            assert mock.call_args[0][0] == "socks5://10.0.0.1:1080"

        assert pool._proxies["socks5://10.0.0.1:1080"].alive is True
        # proxy B untouched
        assert pool._proxies["socks5://10.0.0.2:1080"].alive is False

    def test_chain_check_skips_on_failure(self):
        """If the upstream chain itself fails, individual proxy tests are skipped."""
        chain_hop = ChainHop(proto="socks5", host="127.0.0.1", port=9050)
        cfg = ProxyPoolConfig(sources=[])
        pool = ProxyPool(cfg, [chain_hop], timeout=10.0)

        now = time.time()
        hop = ChainHop(proto="socks5", host="10.0.0.1", port=1080)
        pool._proxies["socks5://10.0.0.1:1080"] = ProxyEntry(
            hop=hop, alive=True, last_seen=now, last_ok=now,
        )
        pool._rebuild_alive()

        # chain test fails -> proxy tests should be skipped
        with (
            patch.object(pool, "_test_chain", new_callable=AsyncMock, return_value=False),
            patch.object(pool, "_test_proxy", new_callable=AsyncMock) as mock_proxy,
        ):
            asyncio.run(pool._run_health_tests())
            mock_proxy.assert_not_called()

        # proxy should remain in its previous state (untouched)
        assert pool._proxies["socks5://10.0.0.1:1080"].alive is True

    def test_chain_check_passes(self):
        """A healthy chain lets proxy tests run and mark entries alive."""
        chain_hop = ChainHop(proto="socks5", host="127.0.0.1", port=9050)
        cfg = ProxyPoolConfig(sources=[])
        pool = ProxyPool(cfg, [chain_hop], timeout=10.0)

        now = time.time()
        hop = ChainHop(proto="socks5", host="10.0.0.1", port=1080)
        pool._proxies["socks5://10.0.0.1:1080"] = ProxyEntry(
            hop=hop, alive=False, last_seen=now,
        )

        # chain test passes -> proxy tests should run
        with (
            patch.object(pool, "_test_chain", new_callable=AsyncMock, return_value=True),
            patch.object(pool, "_test_proxy", new_callable=AsyncMock, return_value=True),
        ):
            asyncio.run(pool._run_health_tests())

        assert pool._proxies["socks5://10.0.0.1:1080"].alive is True

    def test_no_chain_skips_check(self):
        """Without a static chain the chain pre-check is bypassed entirely."""
        cfg = ProxyPoolConfig(sources=[])
        pool = ProxyPool(cfg, [], timeout=10.0)  # no static chain

        now = time.time()
        hop = ChainHop(proto="socks5", host="10.0.0.1", port=1080)
        pool._proxies["socks5://10.0.0.1:1080"] = ProxyEntry(
            hop=hop, alive=False, last_seen=now,
        )

        # no chain -> _test_chain should not be called, proxy tests run
        with (
            patch.object(pool, "_test_chain", new_callable=AsyncMock) as mock_chain,
            patch.object(pool, "_test_proxy", new_callable=AsyncMock, return_value=True),
        ):
            asyncio.run(pool._run_health_tests())
            mock_chain.assert_not_called()

        assert pool._proxies["socks5://10.0.0.1:1080"].alive is True
class TestProxyPoolReport:
    """Test dead proxy reporting.

    Uses the module-level asyncio/AsyncMock/patch imports; the originals
    redundantly re-imported them inside every test method.
    """

    def test_report_called_on_eviction(self):
        """Evicted proxies are reported upstream when report_url is set."""
        cfg = ProxyPoolConfig(sources=[], report_url="http://api:8081/report", max_fails=1)
        pool = ProxyPool(cfg, [], timeout=10.0)

        now = time.time()
        hop = ChainHop(proto="socks5", host="10.0.0.1", port=1080)
        pool._proxies["socks5://10.0.0.1:1080"] = ProxyEntry(
            hop=hop, alive=False, last_seen=now, fails=0,
        )

        with (
            patch.object(pool, "_test_proxy", new_callable=AsyncMock, return_value=False),
            patch.object(pool, "_report_dead", new_callable=AsyncMock) as mock_report,
        ):
            asyncio.run(pool._run_health_tests())
            # proxy should be evicted (fails=1 >= max_fails=1)
            assert "socks5://10.0.0.1:1080" not in pool._proxies
            mock_report.assert_called_once()
            keys = mock_report.call_args[0][0]
            assert "socks5://10.0.0.1:1080" in keys

    def test_report_not_called_without_url(self):
        """No report_url configured -> eviction does not trigger reporting."""
        cfg = ProxyPoolConfig(sources=[], max_fails=1)  # no report_url
        pool = ProxyPool(cfg, [], timeout=10.0)

        now = time.time()
        hop = ChainHop(proto="socks5", host="10.0.0.1", port=1080)
        pool._proxies["socks5://10.0.0.1:1080"] = ProxyEntry(
            hop=hop, alive=False, last_seen=now, fails=0,
        )

        with (
            patch.object(pool, "_test_proxy", new_callable=AsyncMock, return_value=False),
            patch.object(pool, "_report_dead", new_callable=AsyncMock) as mock_report,
        ):
            asyncio.run(pool._run_health_tests())
            mock_report.assert_not_called()

    def test_report_async_payload(self):
        """_report_dead POSTs a {"dead": [{proto, proxy}]} payload to the URL."""
        cfg = ProxyPoolConfig(sources=[], report_url="http://api:8081/report")
        pool = ProxyPool(cfg, [], timeout=10.0)

        with patch("s5p.pool.http_post_json", new_callable=AsyncMock) as mock_post:
            asyncio.run(pool._report_dead(["socks5://10.0.0.1:1080"]))
            mock_post.assert_called_once()
            url = mock_post.call_args[0][0]
            payload = mock_post.call_args[0][1]
            assert url == "http://api:8081/report"
            assert payload == {"dead": [{"proto": "socks5", "proxy": "10.0.0.1:1080"}]}
class TestProxyPoolStaleExpiry:
    """Test stale proxy eviction.

    Uses the module-level asyncio/AsyncMock/patch imports; the originals
    redundantly re-imported them inside every test method.
    """

    def test_stale_dead_proxy_evicted(self):
        """Dead proxies unseen for multiple refresh cycles are dropped; fresh ones stay."""
        cfg = ProxyPoolConfig(sources=[], refresh=300)
        pool = ProxyPool(cfg, [], timeout=10.0)

        now = time.time()
        # stale + dead: should be evicted
        stale = ChainHop(proto="socks5", host="10.0.0.1", port=1080)
        pool._proxies["socks5://10.0.0.1:1080"] = ProxyEntry(
            hop=stale, alive=False, last_seen=now - 1200,
        )
        # fresh + dead: should survive (recently seen, might recover)
        fresh_dead = ChainHop(proto="socks5", host="10.0.0.2", port=1080)
        pool._proxies["socks5://10.0.0.2:1080"] = ProxyEntry(
            hop=fresh_dead, alive=False, last_seen=now,
        )

        with patch.object(pool, "_test_proxy", new_callable=AsyncMock, return_value=False):
            asyncio.run(pool._run_health_tests())

        assert "socks5://10.0.0.1:1080" not in pool._proxies
        assert "socks5://10.0.0.2:1080" in pool._proxies

    def test_stale_alive_proxy_kept(self):
        """Staleness alone never evicts a proxy that still passes health tests."""
        cfg = ProxyPoolConfig(sources=[], refresh=300)
        pool = ProxyPool(cfg, [], timeout=10.0)

        now = time.time()
        # stale but alive: should survive (still passing health tests)
        hop = ChainHop(proto="socks5", host="10.0.0.1", port=1080)
        pool._proxies["socks5://10.0.0.1:1080"] = ProxyEntry(
            hop=hop, alive=True, last_seen=now - 1200, last_ok=now,
        )

        with patch.object(pool, "_test_proxy", new_callable=AsyncMock, return_value=True):
            asyncio.run(pool._run_health_tests())

        assert "socks5://10.0.0.1:1080" in pool._proxies
class TestProxyPoolFetchFile:
    """Test file source parsing."""

    @staticmethod
    def _pool():
        # Minimal pool with no configured sources.
        return ProxyPool(ProxyPoolConfig(sources=[]), [], timeout=10.0)

    def test_parse_file(self, tmp_path):
        # Comments and blank lines are skipped; credentials are parsed out.
        proxy_file = tmp_path / "proxies.txt"
        proxy_file.write_text(
            "# comment\n"
            "socks5://1.2.3.4:1080\n"
            "socks5://user:pass@5.6.7.8:1080\n"
            "http://proxy.example.com:8080\n"
            "\n"
            " # another comment\n"
        )
        entries = self._pool()._fetch_file_sync(PoolSourceConfig(file=str(proxy_file)))
        assert len(entries) == 3
        assert (entries[0].proto, entries[0].host) == ("socks5", "1.2.3.4")
        assert (entries[1].username, entries[1].password) == ("user", "pass")
        assert entries[2].proto == "http"

    def test_parse_file_with_proto_filter(self, tmp_path):
        # proto= keeps only entries with the matching scheme.
        proxy_file = tmp_path / "proxies.txt"
        proxy_file.write_text(
            "socks5://1.2.3.4:1080\n"
            "http://proxy.example.com:8080\n"
        )
        src = PoolSourceConfig(file=str(proxy_file), proto="socks5")
        entries = self._pool()._fetch_file_sync(src)
        assert len(entries) == 1
        assert entries[0].proto == "socks5"

    def test_missing_file(self, tmp_path):
        # A nonexistent file yields an empty list rather than raising.
        src = PoolSourceConfig(file=str(tmp_path / "nonexistent.txt"))
        assert self._pool()._fetch_file_sync(src) == []
class TestProxyPoolPersistence:
    """Test state save/load."""

    def test_save_and_load(self, tmp_path):
        # Round-trip: save from one pool, restore into a fresh one.
        cfg = ProxyPoolConfig(sources=[], state_file=str(tmp_path / "pool.json"))
        writer = ProxyPool(cfg, [], timeout=10.0)

        hop = ChainHop(proto="socks5", host="1.2.3.4", port=1080)
        writer._proxies["socks5://1.2.3.4:1080"] = ProxyEntry(
            hop=hop, alive=True, fails=0, tests=5, last_ok=1000.0,
        )
        writer._rebuild_alive()
        writer._save_state()

        reader = ProxyPool(cfg, [], timeout=10.0)
        reader._load_state()
        assert reader.count == 1
        assert reader.alive_count == 1
        restored = reader._proxies["socks5://1.2.3.4:1080"]
        assert restored.hop.host == "1.2.3.4"
        assert restored.tests == 5
        assert restored.alive is True

    def test_corrupt_state(self, tmp_path):
        # Unparseable state is discarded, leaving an empty pool.
        state_file = tmp_path / "pool.json"
        state_file.write_text("{invalid json")
        pool = ProxyPool(
            ProxyPoolConfig(sources=[], state_file=str(state_file)), [], timeout=10.0,
        )
        pool._load_state()
        assert pool.count == 0

    def test_missing_state(self, tmp_path):
        # Loading a state file that never existed is a harmless no-op.
        cfg = ProxyPoolConfig(sources=[], state_file=str(tmp_path / "missing.json"))
        pool = ProxyPool(cfg, [], timeout=10.0)
        pool._load_state()  # should not raise
        assert pool.count == 0

    def test_state_with_auth(self, tmp_path):
        # Credentials must survive a save/load round-trip.
        cfg = ProxyPoolConfig(sources=[], state_file=str(tmp_path / "pool.json"))
        writer = ProxyPool(cfg, [], timeout=10.0)

        hop = ChainHop(
            proto="socks5", host="1.2.3.4", port=1080,
            username="user", password="pass",
        )
        writer._proxies["socks5://1.2.3.4:1080"] = ProxyEntry(hop=hop, alive=True)
        writer._save_state()

        reader = ProxyPool(cfg, [], timeout=10.0)
        reader._load_state()
        restored = reader._proxies["socks5://1.2.3.4:1080"]
        assert restored.hop.username == "user"
        assert restored.hop.password == "pass"
class TestTlsCheck:
    """Test TLS handshake health check."""

    def _make_pool(self, **kwargs):
        cfg = ProxyPoolConfig(sources=[], **kwargs)
        return ProxyPool(cfg, [], timeout=10.0)

    @staticmethod
    def _fake_writer():
        """Build a mock StreamWriter whose transport exposes a mock protocol."""
        writer = MagicMock()
        writer.is_closing.return_value = False
        transport = MagicMock()
        transport.get_protocol.return_value = MagicMock()
        writer.transport = transport
        return writer

    def test_success(self):
        pool = self._make_pool(test_targets=["www.example.com"])
        writer = self._fake_writer()
        upgraded = MagicMock()

        with (
            patch("s5p.pool.build_chain", new_callable=AsyncMock,
                  return_value=(MagicMock(), writer)),
            patch("asyncio.get_running_loop") as loop_factory,
        ):
            loop = MagicMock()
            loop.start_tls = AsyncMock(return_value=upgraded)
            loop_factory.return_value = loop

            outcome = asyncio.run(pool._tls_check([]))

        assert outcome is True
        loop.start_tls.assert_called_once_with(
            writer.transport,
            writer.transport.get_protocol.return_value,
            pool._ssl_ctx,
            server_hostname="www.example.com",
        )
        upgraded.close.assert_called_once()

    def test_build_chain_failure(self):
        # Chain construction errors count as a failed check.
        pool = self._make_pool(test_targets=["www.example.com"])

        with patch(
            "s5p.pool.build_chain", new_callable=AsyncMock,
            side_effect=ConnectionError("refused"),
        ):
            outcome = asyncio.run(pool._tls_check([]))

        assert outcome is False

    def test_handshake_failure(self):
        # An SSL error raised by start_tls yields False.
        pool = self._make_pool(test_targets=["www.example.com"])
        writer = self._fake_writer()

        with (
            patch("s5p.pool.build_chain", new_callable=AsyncMock,
                  return_value=(MagicMock(), writer)),
            patch("asyncio.get_running_loop") as loop_factory,
        ):
            loop = MagicMock()
            loop.start_tls = AsyncMock(
                side_effect=ssl.SSLError("handshake failed"),
            )
            loop_factory.return_value = loop

            outcome = asyncio.run(pool._tls_check([]))

        assert outcome is False

    def test_round_robin_rotation(self):
        # Successive checks rotate through configured targets in order.
        targets = ["host-a.example.com", "host-b.example.com", "host-c.example.com"]
        pool = self._make_pool(test_targets=targets)

        attempted: list[str] = []

        async def fake_build_chain(chain, host, port, timeout=None):
            attempted.append(host)
            raise ConnectionError("test")

        with patch("s5p.pool.build_chain", side_effect=fake_build_chain):
            for _ in range(6):
                asyncio.run(pool._tls_check([]))

        assert attempted == targets * 2

    def test_empty_targets(self):
        # With no targets configured the check cannot pass.
        pool = self._make_pool(test_targets=[])
        assert asyncio.run(pool._tls_check([])) is False
class TestProxyPoolConfigCompat:
    """Test backward compatibility for test_url -> test_targets."""

    def test_legacy_test_url_converts(self):
        # A legacy test_url is reduced to its hostname as the sole target.
        cfg = ProxyPoolConfig(test_url="http://httpbin.org/ip")
        assert cfg.test_targets == ["httpbin.org"]

    def test_explicit_test_targets_wins(self):
        # Explicit test_targets takes precedence over the legacy field.
        cfg = ProxyPoolConfig(
            test_url="http://httpbin.org/ip",
            test_targets=["custom.example.com"],
        )
        assert cfg.test_targets == ["custom.example.com"]

    def test_defaults_when_neither_set(self):
        # Unset config falls back to well-known TLS targets, empty test_url.
        cfg = ProxyPoolConfig()
        assert cfg.test_targets == [
            "www.google.com", "www.cloudflare.com", "www.amazon.com",
        ]
        assert cfg.test_url == ""