fix: route the searx and alert plugins' SearXNG traffic through SOCKS5 proxy

Both plugins called urllib.request.urlopen directly, bypassing the
proxy. Switch to derp.http.urlopen and update the SearXNG endpoint
to the public domain (searx.mymx.me). Update test mocks to match.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
user
2026-02-15 16:56:45 +01:00
parent 6e591a85b2
commit 29e77f97b2
4 changed files with 17 additions and 16 deletions

View File

@@ -26,7 +26,7 @@ _YT_SEARCH_URL = "https://www.youtube.com/youtubei/v1/search"
_YT_CLIENT_VERSION = "2.20250101.00.00"
_GQL_URL = "https://gql.twitch.tv/gql"
_GQL_CLIENT_ID = "kimne78kx3ncx6brgo4mv6wki5h1ko"
_SEARX_URL = "http://192.168.122.119:3000/search"
_SEARX_URL = "https://searx.mymx.me/search"
# -- Module-level tracking ---------------------------------------------------
@@ -202,7 +202,7 @@ def _search_searx(keyword: str) -> list[dict]:
url = f"{_SEARX_URL}?{params}"
req = urllib.request.Request(url, method="GET")
resp = urllib.request.urlopen(req, timeout=_FETCH_TIMEOUT)
resp = _urlopen(req, timeout=_FETCH_TIMEOUT)
raw = resp.read()
resp.close()

View File

@@ -6,11 +6,12 @@ import json
import urllib.parse
import urllib.request
from derp.http import urlopen as _urlopen
from derp.plugin import command
# -- Constants ---------------------------------------------------------------
_SEARX_URL = "http://192.168.122.119:3000/search"
_SEARX_URL = "https://searx.mymx.me/search"
_FETCH_TIMEOUT = 10
_MAX_RESULTS = 3
_MAX_TITLE_LEN = 80
@@ -38,7 +39,7 @@ def _search(query: str) -> list[dict]:
url = f"{_SEARX_URL}?{params}"
req = urllib.request.Request(url, method="GET")
resp = urllib.request.urlopen(req, timeout=_FETCH_TIMEOUT)
resp = _urlopen(req, timeout=_FETCH_TIMEOUT)
raw = resp.read()
resp.close()

View File

@@ -1220,7 +1220,7 @@ class TestSearchSearx:
def close(self):
pass
with patch("urllib.request.urlopen", return_value=FakeResp()):
with patch.object(_mod, "_urlopen", return_value=FakeResp()):
results = _search_searx("test query")
assert len(results) == 3
assert results[0]["id"] == "https://example.com/sx1"
@@ -1237,13 +1237,13 @@ class TestSearchSearx:
def close(self):
pass
with patch("urllib.request.urlopen", return_value=FakeResp()):
with patch.object(_mod, "_urlopen", return_value=FakeResp()):
results = _search_searx("nothing")
assert results == []
def test_http_error_propagates(self):
import pytest
with patch("urllib.request.urlopen", side_effect=ConnectionError("fail")):
with patch.object(_mod, "_urlopen", side_effect=ConnectionError("fail")):
with pytest.raises(ConnectionError):
_search_searx("test")

View File

@@ -120,7 +120,7 @@ class TestTruncate:
class TestSearch:
def test_success(self):
with patch("urllib.request.urlopen", return_value=_FakeResp(SEARX_RESPONSE)):
with patch.object(_mod, "_urlopen", return_value=_FakeResp(SEARX_RESPONSE)):
results = _search("test query")
assert len(results) == 5
assert results[0]["title"] == "Result One"
@@ -128,14 +128,14 @@ class TestSearch:
assert results[0]["snippet"] == "First snippet"
def test_empty_results(self):
with patch("urllib.request.urlopen", return_value=_FakeResp(SEARX_EMPTY)):
with patch.object(_mod, "_urlopen", return_value=_FakeResp(SEARX_EMPTY)):
results = _search("nothing")
assert results == []
def test_http_error_propagates(self):
import pytest
with patch("urllib.request.urlopen", side_effect=ConnectionError("down")):
with patch.object(_mod, "_urlopen", side_effect=ConnectionError("down")):
with pytest.raises(ConnectionError):
_search("test")
@@ -144,7 +144,7 @@ class TestSearch:
data = {"results": [
{"title": "T", "url": "http://x.com", "snippet": "fallback"},
]}
with patch("urllib.request.urlopen", return_value=_FakeResp(data)):
with patch.object(_mod, "_urlopen", return_value=_FakeResp(data)):
results = _search("test")
assert results[0]["snippet"] == "fallback"
@@ -158,7 +158,7 @@ class TestCmdSearx:
bot = _FakeBot()
async def inner():
with patch("urllib.request.urlopen", return_value=_FakeResp(SEARX_RESPONSE)):
with patch.object(_mod, "_urlopen", return_value=_FakeResp(SEARX_RESPONSE)):
await cmd_searx(bot, _msg("!searx test query"))
assert len(bot.replied) == _MAX_RESULTS
assert "Result One" in bot.replied[0]
@@ -171,7 +171,7 @@ class TestCmdSearx:
bot = _FakeBot()
async def inner():
with patch("urllib.request.urlopen", return_value=_FakeResp(SEARX_EMPTY)):
with patch.object(_mod, "_urlopen", return_value=_FakeResp(SEARX_EMPTY)):
await cmd_searx(bot, _msg("!searx nothing"))
assert len(bot.replied) == 1
assert "No results for:" in bot.replied[0]
@@ -182,7 +182,7 @@ class TestCmdSearx:
bot = _FakeBot()
async def inner():
with patch("urllib.request.urlopen", side_effect=ConnectionError("fail")):
with patch.object(_mod, "_urlopen", side_effect=ConnectionError("fail")):
await cmd_searx(bot, _msg("!searx broken"))
assert len(bot.replied) == 1
assert "Search failed:" in bot.replied[0]
@@ -219,7 +219,7 @@ class TestCmdSearx:
bot = _FakeBot()
async def inner():
with patch("urllib.request.urlopen", return_value=_FakeResp(data)):
with patch.object(_mod, "_urlopen", return_value=_FakeResp(data)):
await cmd_searx(bot, _msg("!searx test"))
assert len(bot.replied) == 1
title_part = bot.replied[0].split(" -- ")[0]
@@ -236,7 +236,7 @@ class TestCmdSearx:
bot = _FakeBot()
async def inner():
with patch("urllib.request.urlopen", return_value=_FakeResp(data)):
with patch.object(_mod, "_urlopen", return_value=_FakeResp(data)):
await cmd_searx(bot, _msg("!searx test"))
assert "(no title)" in bot.replied[0]