httpd: add freshness filter, mitm param, and provenance to upsert

Export endpoints now only return proxies whose last_seen is within the
last 60 minutes. Add an optional mitm=0|1 query parameter to filter MITM
proxies in the export and count endpoints. Fix the upsert to also record
success_count, consecutive_success, last_check, and last_target.
This commit is contained in:
Username
2026-02-17 21:06:27 +01:00
parent dfcd8f0c00
commit ba9553f4aa

View File

@@ -731,20 +731,28 @@ def submit_proxy_reports(db, worker_id, proxies):
proto = p.get('proto', 'http')
latency = p.get('latency', 0)
source_url = p.get('source_url')
checktype = p.get('checktype', '')
target = p.get('target', '')
try:
# Upsert: insert new proxy or update existing as working
db.execute('''
INSERT INTO proxylist (proxy, ip, port, proto, failed, tested, added,
avg_latency, last_seen)
VALUES (?, ?, ?, ?, 0, ?, ?, ?, ?)
avg_latency, last_seen, success_count,
consecutive_success, last_check, last_target)
VALUES (?, ?, ?, ?, 0, ?, ?, ?, ?, 1, 1, ?, ?)
ON CONFLICT(proxy) DO UPDATE SET
failed = 0,
tested = excluded.tested,
proto = excluded.proto,
avg_latency = excluded.avg_latency,
last_seen = excluded.last_seen
''', (proxy_key, ip, port, proto, now_int, now_int, latency, now_int))
last_seen = excluded.last_seen,
success_count = COALESCE(success_count, 0) + 1,
consecutive_success = COALESCE(consecutive_success, 0) + 1,
last_check = excluded.last_check,
last_target = excluded.last_target
''', (proxy_key, ip, port, proto, now_int, now_int, latency, now_int,
checktype, target))
# Geolocate if IP2Location available
if _geolite and _geodb:
@@ -1442,9 +1450,10 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
proto = params.get('proto', '')
country = params.get('country', '')
asn = params.get('asn', '')
mitm_filter = params.get('mitm', '')
fmt = params.get('format', 'json')
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL'
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
args = []
if proto:
@@ -1456,6 +1465,10 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
if asn:
sql += ' AND asn=?'
args.append(int(asn))
if mitm_filter == '0':
sql += ' AND mitm=0'
elif mitm_filter == '1':
sql += ' AND mitm=1'
sql += ' ORDER BY avg_latency ASC, tested DESC LIMIT ?'
args.append(limit)
@@ -1487,9 +1500,10 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
proto = params.get('proto', '')
country = params.get('country', '')
asn = params.get('asn', '')
mitm_filter = params.get('mitm', '')
fmt = params.get('format', 'json')
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL'
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
args = []
if proto:
@@ -1501,6 +1515,10 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
if asn:
sql += ' AND asn=?'
args.append(int(asn))
if mitm_filter == '0':
sql += ' AND mitm=0'
elif mitm_filter == '1':
sql += ' AND mitm=1'
sql += ' ORDER BY avg_latency ASC, tested DESC'
@@ -1817,9 +1835,10 @@ class ProxyAPIServer(threading.Thread):
proto = query_params.get('proto', '')
country = query_params.get('country', '')
asn = query_params.get('asn', '')
mitm_filter = query_params.get('mitm', '')
fmt = query_params.get('format', 'json')
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL'
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
args = []
if proto:
sql += ' AND proto=?'
@@ -1830,6 +1849,10 @@ class ProxyAPIServer(threading.Thread):
if asn:
sql += ' AND asn=?'
args.append(int(asn))
if mitm_filter == '0':
sql += ' AND mitm=0'
elif mitm_filter == '1':
sql += ' AND mitm=1'
sql += ' ORDER BY avg_latency ASC, tested DESC LIMIT ?'
args.append(limit)
@@ -1851,9 +1874,10 @@ class ProxyAPIServer(threading.Thread):
proto = query_params.get('proto', '')
country = query_params.get('country', '')
asn = query_params.get('asn', '')
mitm_filter = query_params.get('mitm', '')
fmt = query_params.get('format', 'json')
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL'
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
args = []
if proto:
sql += ' AND proto=?'
@@ -1864,6 +1888,10 @@ class ProxyAPIServer(threading.Thread):
if asn:
sql += ' AND asn=?'
args.append(int(asn))
if mitm_filter == '0':
sql += ' AND mitm=0'
elif mitm_filter == '1':
sql += ' AND mitm=1'
sql += ' ORDER BY avg_latency ASC, tested DESC'
db = mysqlite.mysqlite(self.database, str)
@@ -1881,8 +1909,14 @@ class ProxyAPIServer(threading.Thread):
return json.dumps({'error': str(e)}), 'application/json', 500
elif path == '/proxies/count':
try:
mitm_filter = query_params.get('mitm', '')
sql = 'SELECT COUNT(*) FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
if mitm_filter == '0':
sql += ' AND mitm=0'
elif mitm_filter == '1':
sql += ' AND mitm=1'
db = mysqlite.mysqlite(self.database, str)
row = db.execute('SELECT COUNT(*) FROM proxylist WHERE failed=0 AND proto IS NOT NULL').fetchone()
row = db.execute(sql).fetchone()
return json.dumps({'count': row[0] if row else 0}), 'application/json', 200
except Exception as e:
return json.dumps({'error': str(e)}), 'application/json', 500