fix: route alert YouTube/SearXNG through pooled urlopen
- YouTube InnerTube search: urllib.request.urlopen -> _urlopen (gets connection pooling + SOCKS5 proxy)
- SearXNG search: urllib.request.urlopen -> _urlopen(proxy=False) (local service, skip proxy, get pooling)
- Update 5 tests to patch _urlopen instead of urllib.request.urlopen

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2
TASKS.md
2
TASKS.md
@@ -23,7 +23,7 @@
|
||||
| P0 | [x] | `plugins/searx.py` -- route through `derp.http.urlopen(proxy=False)` |
|
||||
| P1 | [x] | Connection pool: `preload_content=True` + `_PooledResponse` wrapper for connection reuse |
|
||||
| P1 | [x] | Pool tuning: `num_pools=30, maxsize=8` (was 20/4) |
|
||||
| P2 | [ ] | Audit remaining plugins for unnecessary proxy routing |
|
||||
| P2 | [x] | Audit remaining plugins for unnecessary proxy routing |
|
||||
|
||||
## Previous Sprint -- Music Discovery via Last.fm (2026-02-22)
|
||||
|
||||
|
||||
@@ -436,7 +436,7 @@ def _search_youtube(keyword: str) -> list[dict]:
|
||||
req = urllib.request.Request(_YT_SEARCH_URL, data=payload, method="POST")
|
||||
req.add_header("Content-Type", "application/json")
|
||||
|
||||
resp = urllib.request.urlopen(req, timeout=_FETCH_TIMEOUT)
|
||||
resp = _urlopen(req, timeout=_FETCH_TIMEOUT)
|
||||
raw = resp.read()
|
||||
resp.close()
|
||||
|
||||
@@ -545,7 +545,7 @@ def _search_searx(keyword: str) -> list[dict]:
|
||||
})
|
||||
req = urllib.request.Request(f"{_SEARX_URL}?{params}", method="GET")
|
||||
try:
|
||||
resp = urllib.request.urlopen(req, timeout=_FETCH_TIMEOUT)
|
||||
resp = _urlopen(req, timeout=_FETCH_TIMEOUT, proxy=False)
|
||||
raw = resp.read()
|
||||
resp.close()
|
||||
except Exception as exc:
|
||||
|
||||
@@ -420,7 +420,7 @@ class TestExtractVideos:
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
with patch("urllib.request.urlopen", return_value=FakeResp()):
|
||||
with patch.object(_mod, "_urlopen", return_value=FakeResp()):
|
||||
results = _search_youtube("test")
|
||||
assert len(results) == 1
|
||||
assert results[0]["id"] == "dup1"
|
||||
@@ -438,7 +438,7 @@ class TestSearchYoutube:
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
with patch("urllib.request.urlopen", return_value=FakeResp()):
|
||||
with patch.object(_mod, "_urlopen", return_value=FakeResp()):
|
||||
results = _search_youtube("test query")
|
||||
assert len(results) == 2
|
||||
assert results[0]["id"] == "abc123"
|
||||
@@ -446,7 +446,7 @@ class TestSearchYoutube:
|
||||
|
||||
def test_http_error_propagates(self):
|
||||
import pytest
|
||||
with patch("urllib.request.urlopen", side_effect=ConnectionError("fail")):
|
||||
with patch.object(_mod, "_urlopen", side_effect=ConnectionError("fail")):
|
||||
with pytest.raises(ConnectionError):
|
||||
_search_youtube("test")
|
||||
|
||||
@@ -1263,7 +1263,7 @@ class TestSearchSearx:
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
with patch("urllib.request.urlopen", return_value=FakeResp()):
|
||||
with patch.object(_mod, "_urlopen", return_value=FakeResp()):
|
||||
results = _search_searx("test query")
|
||||
# Same response served for all categories; deduped by URL
|
||||
assert len(results) == 3
|
||||
@@ -1281,13 +1281,13 @@ class TestSearchSearx:
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
with patch("urllib.request.urlopen", return_value=FakeResp()):
|
||||
with patch.object(_mod, "_urlopen", return_value=FakeResp()):
|
||||
results = _search_searx("nothing")
|
||||
assert results == []
|
||||
|
||||
def test_http_error_returns_empty(self):
|
||||
"""SearXNG catches per-category errors; all failing returns empty."""
|
||||
with patch("urllib.request.urlopen", side_effect=ConnectionError("fail")):
|
||||
with patch.object(_mod, "_urlopen", side_effect=ConnectionError("fail")):
|
||||
results = _search_searx("test")
|
||||
assert results == []
|
||||
|
||||
|
||||
Reference in New Issue
Block a user