feat: add Hacker News and GitHub backends to alert plugin

Hacker News (hn) uses the Algolia search_by_date API for stories,
appends the point count to the title, and falls back to the HN
discussion URL when a story has no external link. GitHub (gh)
searches repositories sorted by most recently updated and shows the
star count plus a truncated description. Both backends are routed
through the SOCKS5 proxy via _urlopen.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
author user
date 2026-02-15 23:10:00 +01:00
parent f0b198d98a
commit 5ded8186dd
3 changed files with 99 additions and 6 deletions
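For context, a minimal standalone sketch of the Algolia query the new hn backend performs. This is a hedged example: it uses plain urllib.request with no SOCKS5 proxy, and the keyword "zig" and the hitsPerPage value are arbitrary; the plugin itself routes through _urlopen as shown in the diff below.

    import json
    import urllib.parse
    import urllib.request

    params = urllib.parse.urlencode({
        "query": "zig", "tags": "story", "hitsPerPage": "3",
    })
    # Public Algolia Hacker News API, same endpoint as _HN_SEARCH_URL
    with urllib.request.urlopen(f"https://hn.algolia.com/api/v1/search_by_date?{params}") as resp:
        data = json.load(resp)

    for hit in data["hits"]:
        # Same fallback as the backend: external URL, else the HN discussion page
        url = hit.get("url") or f"https://news.ycombinator.com/item?id={hit['objectID']}"
        print(hit.get("points"), hit.get("title"), url)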

@@ -61,6 +61,8 @@ _LEMMY_INSTANCES = [
_LEMMY_TIMEOUT = 4
_ODYSEE_API = "https://api.na-backend.odysee.com/api/v1/proxy"
_ARCHIVE_SEARCH_URL = "https://archive.org/advancedsearch.php"
_HN_SEARCH_URL = "https://hn.algolia.com/api/v1/search_by_date"
_GITHUB_SEARCH_URL = "https://api.github.com/search/repositories"
# -- Module-level tracking ---------------------------------------------------
@@ -1002,6 +1004,92 @@ def _search_archive(keyword: str) -> list[dict]:
    return results
# -- Hacker News search (blocking) ------------------------------------------
def _search_hackernews(keyword: str) -> list[dict]:
    """Search Hacker News via Algolia API, sorted by date. Blocking."""
    import urllib.parse
    import urllib.request
    params = urllib.parse.urlencode({
        "query": keyword, "tags": "story", "hitsPerPage": "25",
    })
    url = f"{_HN_SEARCH_URL}?{params}"
    req = urllib.request.Request(url, method="GET")
    req.add_header("User-Agent", "Mozilla/5.0 (compatible; derp-bot)")
    # _urlopen routes the request through the SOCKS5 proxy
    resp = _urlopen(req, timeout=_FETCH_TIMEOUT)
    raw = resp.read()
    resp.close()
    data = json.loads(raw)
    results: list[dict] = []
    for hit in data.get("hits") or []:
        object_id = hit.get("objectID", "")
        if not object_id:
            continue
        title = hit.get("title", "")
        # External URL if available, otherwise HN discussion link
        item_url = hit.get("url") or f"https://news.ycombinator.com/item?id={object_id}"
        date = _parse_date(hit.get("created_at", ""))
        points = hit.get("points")
        if points:
            title += f" ({points}pts)"
        results.append({
            "id": object_id,
            "title": title,
            "url": item_url,
            "date": date,
            "extra": "",
        })
    return results
# -- GitHub search (blocking) -----------------------------------------------
def _search_github(keyword: str) -> list[dict]:
    """Search GitHub repositories via public API. Blocking."""
    import urllib.parse
    import urllib.request
    params = urllib.parse.urlencode({
        "q": keyword, "sort": "updated", "order": "desc", "per_page": "25",
    })
    url = f"{_GITHUB_SEARCH_URL}?{params}"
    req = urllib.request.Request(url, method="GET")
    req.add_header("Accept", "application/vnd.github+json")
    req.add_header("User-Agent", "Mozilla/5.0 (compatible; derp-bot)")
    # _urlopen routes the request through the SOCKS5 proxy
    resp = _urlopen(req, timeout=_FETCH_TIMEOUT)
    raw = resp.read()
    resp.close()
    data = json.loads(raw)
    results: list[dict] = []
    for repo in data.get("items") or []:
        repo_id = str(repo.get("id", ""))
        if not repo_id:
            continue
        full_name = repo.get("full_name", "")
        description = repo.get("description") or ""
        html_url = repo.get("html_url", "")
        stars = repo.get("stargazers_count", 0)
        # Title format: "owner/name: truncated description [N*]"
        title = full_name
        if description:
            title += f": {_truncate(description, 50)}"
        if stars:
            title += f" [{stars}*]"
        date = _parse_date(repo.get("updated_at", ""))
        results.append({
            "id": repo_id,
            "title": title,
            "url": html_url,
            "date": date,
            "extra": "",
        })
    return results
# -- Backend registry -------------------------------------------------------
_BACKENDS: dict[str, callable] = {
@@ -1019,6 +1107,8 @@ _BACKENDS: dict[str, callable] = {
"ly": _search_lemmy,
"od": _search_odysee,
"ia": _search_archive,
"hn": _search_hackernews,
"gh": _search_github,
}
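
The dispatch site is not part of this diff; assuming the alert plugin resolves a backend by its two-letter key and calls it with the keyword, usage would look roughly like this (hypothetical call site; the result keys come from the functions above):

    # Hypothetical dispatch through the registry; "gh" and "sqlite" are
    # example values, not from this commit.
    backend = _BACKENDS.get("gh")
    if backend is not None:
        for item in backend("sqlite"):
            print(item["date"], item["title"], item["url"])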