fix: route SearXNG direct via static route, drop proxy

SearXNG instance at 192.168.122.119 is reachable via grokbox
static route -- no need to tunnel through SOCKS5. Reverts searx
and alert plugins to stdlib urlopen for SearXNG queries. YouTube
and Twitch in alert.py still use the proxy. Also removes the
--cprofile flag from the docker-compose command.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
user
2026-02-15 17:52:43 +01:00
parent 23ba7dc474
commit b973635445
4 changed files with 14 additions and 15 deletions

View File

@@ -11,4 +11,4 @@ services:
- ./plugins:/app/plugins:ro,Z - ./plugins:/app/plugins:ro,Z
- ./config/derp.toml:/app/config/derp.toml:ro,Z - ./config/derp.toml:/app/config/derp.toml:ro,Z
- ./data:/app/data:Z - ./data:/app/data:Z
command: ["--verbose", "--cprofile", "/app/data/derp.prof"] command: ["--verbose"]

View File

@@ -26,7 +26,7 @@ _YT_SEARCH_URL = "https://www.youtube.com/youtubei/v1/search"
_YT_CLIENT_VERSION = "2.20250101.00.00" _YT_CLIENT_VERSION = "2.20250101.00.00"
_GQL_URL = "https://gql.twitch.tv/gql" _GQL_URL = "https://gql.twitch.tv/gql"
_GQL_CLIENT_ID = "kimne78kx3ncx6brgo4mv6wki5h1ko" _GQL_CLIENT_ID = "kimne78kx3ncx6brgo4mv6wki5h1ko"
_SEARX_URL = "https://searx.mymx.me/search" _SEARX_URL = "http://192.168.122.119:3000/search"
# -- Module-level tracking --------------------------------------------------- # -- Module-level tracking ---------------------------------------------------
@@ -202,7 +202,7 @@ def _search_searx(keyword: str) -> list[dict]:
url = f"{_SEARX_URL}?{params}" url = f"{_SEARX_URL}?{params}"
req = urllib.request.Request(url, method="GET") req = urllib.request.Request(url, method="GET")
resp = _urlopen(req, timeout=_FETCH_TIMEOUT) resp = urllib.request.urlopen(req, timeout=_FETCH_TIMEOUT)
raw = resp.read() raw = resp.read()
resp.close() resp.close()

View File

@@ -6,12 +6,11 @@ import json
import urllib.parse import urllib.parse
import urllib.request import urllib.request
from derp.http import urlopen as _urlopen
from derp.plugin import command from derp.plugin import command
# -- Constants --------------------------------------------------------------- # -- Constants ---------------------------------------------------------------
_SEARX_URL = "https://searx.mymx.me/search" _SEARX_URL = "http://192.168.122.119:3000/search"
_FETCH_TIMEOUT = 10 _FETCH_TIMEOUT = 10
_MAX_RESULTS = 3 _MAX_RESULTS = 3
_MAX_TITLE_LEN = 80 _MAX_TITLE_LEN = 80
@@ -39,7 +38,7 @@ def _search(query: str) -> list[dict]:
url = f"{_SEARX_URL}?{params}" url = f"{_SEARX_URL}?{params}"
req = urllib.request.Request(url, method="GET") req = urllib.request.Request(url, method="GET")
resp = _urlopen(req, timeout=_FETCH_TIMEOUT) resp = urllib.request.urlopen(req, timeout=_FETCH_TIMEOUT)
raw = resp.read() raw = resp.read()
resp.close() resp.close()

View File

@@ -120,7 +120,7 @@ class TestTruncate:
class TestSearch: class TestSearch:
def test_success(self): def test_success(self):
with patch.object(_mod, "_urlopen", return_value=_FakeResp(SEARX_RESPONSE)): with patch("urllib.request.urlopen", return_value=_FakeResp(SEARX_RESPONSE)):
results = _search("test query") results = _search("test query")
assert len(results) == 5 assert len(results) == 5
assert results[0]["title"] == "Result One" assert results[0]["title"] == "Result One"
@@ -128,14 +128,14 @@ class TestSearch:
assert results[0]["snippet"] == "First snippet" assert results[0]["snippet"] == "First snippet"
def test_empty_results(self): def test_empty_results(self):
with patch.object(_mod, "_urlopen", return_value=_FakeResp(SEARX_EMPTY)): with patch("urllib.request.urlopen", return_value=_FakeResp(SEARX_EMPTY)):
results = _search("nothing") results = _search("nothing")
assert results == [] assert results == []
def test_http_error_propagates(self): def test_http_error_propagates(self):
import pytest import pytest
with patch.object(_mod, "_urlopen", side_effect=ConnectionError("down")): with patch("urllib.request.urlopen", side_effect=ConnectionError("down")):
with pytest.raises(ConnectionError): with pytest.raises(ConnectionError):
_search("test") _search("test")
@@ -144,7 +144,7 @@ class TestSearch:
data = {"results": [ data = {"results": [
{"title": "T", "url": "http://x.com", "snippet": "fallback"}, {"title": "T", "url": "http://x.com", "snippet": "fallback"},
]} ]}
with patch.object(_mod, "_urlopen", return_value=_FakeResp(data)): with patch("urllib.request.urlopen", return_value=_FakeResp(data)):
results = _search("test") results = _search("test")
assert results[0]["snippet"] == "fallback" assert results[0]["snippet"] == "fallback"
@@ -158,7 +158,7 @@ class TestCmdSearx:
bot = _FakeBot() bot = _FakeBot()
async def inner(): async def inner():
with patch.object(_mod, "_urlopen", return_value=_FakeResp(SEARX_RESPONSE)): with patch("urllib.request.urlopen", return_value=_FakeResp(SEARX_RESPONSE)):
await cmd_searx(bot, _msg("!searx test query")) await cmd_searx(bot, _msg("!searx test query"))
assert len(bot.replied) == _MAX_RESULTS assert len(bot.replied) == _MAX_RESULTS
assert "Result One" in bot.replied[0] assert "Result One" in bot.replied[0]
@@ -171,7 +171,7 @@ class TestCmdSearx:
bot = _FakeBot() bot = _FakeBot()
async def inner(): async def inner():
with patch.object(_mod, "_urlopen", return_value=_FakeResp(SEARX_EMPTY)): with patch("urllib.request.urlopen", return_value=_FakeResp(SEARX_EMPTY)):
await cmd_searx(bot, _msg("!searx nothing")) await cmd_searx(bot, _msg("!searx nothing"))
assert len(bot.replied) == 1 assert len(bot.replied) == 1
assert "No results for:" in bot.replied[0] assert "No results for:" in bot.replied[0]
@@ -182,7 +182,7 @@ class TestCmdSearx:
bot = _FakeBot() bot = _FakeBot()
async def inner(): async def inner():
with patch.object(_mod, "_urlopen", side_effect=ConnectionError("fail")): with patch("urllib.request.urlopen", side_effect=ConnectionError("fail")):
await cmd_searx(bot, _msg("!searx broken")) await cmd_searx(bot, _msg("!searx broken"))
assert len(bot.replied) == 1 assert len(bot.replied) == 1
assert "Search failed:" in bot.replied[0] assert "Search failed:" in bot.replied[0]
@@ -219,7 +219,7 @@ class TestCmdSearx:
bot = _FakeBot() bot = _FakeBot()
async def inner(): async def inner():
with patch.object(_mod, "_urlopen", return_value=_FakeResp(data)): with patch("urllib.request.urlopen", return_value=_FakeResp(data)):
await cmd_searx(bot, _msg("!searx test")) await cmd_searx(bot, _msg("!searx test"))
assert len(bot.replied) == 1 assert len(bot.replied) == 1
title_part = bot.replied[0].split(" -- ")[0] title_part = bot.replied[0].split(" -- ")[0]
@@ -236,7 +236,7 @@ class TestCmdSearx:
bot = _FakeBot() bot = _FakeBot()
async def inner(): async def inner():
with patch.object(_mod, "_urlopen", return_value=_FakeResp(data)): with patch("urllib.request.urlopen", return_value=_FakeResp(data)):
await cmd_searx(bot, _msg("!searx test")) await cmd_searx(bot, _msg("!searx test"))
assert "(no title)" in bot.replied[0] assert "(no title)" in bot.replied[0]