httpd: add freshness filter, mitm param, and provenance to upsert
Export endpoints now return only proxies whose last_seen is within the past 60 minutes. Add optional mitm=0|1 query parameter to filter MITM proxies. Fix upsert to track success_count, consecutive_success, last_check, and last_target.
This commit is contained in:
52
httpd.py
52
httpd.py
@@ -731,20 +731,28 @@ def submit_proxy_reports(db, worker_id, proxies):
|
|||||||
proto = p.get('proto', 'http')
|
proto = p.get('proto', 'http')
|
||||||
latency = p.get('latency', 0)
|
latency = p.get('latency', 0)
|
||||||
source_url = p.get('source_url')
|
source_url = p.get('source_url')
|
||||||
|
checktype = p.get('checktype', '')
|
||||||
|
target = p.get('target', '')
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Upsert: insert new proxy or update existing as working
|
# Upsert: insert new proxy or update existing as working
|
||||||
db.execute('''
|
db.execute('''
|
||||||
INSERT INTO proxylist (proxy, ip, port, proto, failed, tested, added,
|
INSERT INTO proxylist (proxy, ip, port, proto, failed, tested, added,
|
||||||
avg_latency, last_seen)
|
avg_latency, last_seen, success_count,
|
||||||
VALUES (?, ?, ?, ?, 0, ?, ?, ?, ?)
|
consecutive_success, last_check, last_target)
|
||||||
|
VALUES (?, ?, ?, ?, 0, ?, ?, ?, ?, 1, 1, ?, ?)
|
||||||
ON CONFLICT(proxy) DO UPDATE SET
|
ON CONFLICT(proxy) DO UPDATE SET
|
||||||
failed = 0,
|
failed = 0,
|
||||||
tested = excluded.tested,
|
tested = excluded.tested,
|
||||||
proto = excluded.proto,
|
proto = excluded.proto,
|
||||||
avg_latency = excluded.avg_latency,
|
avg_latency = excluded.avg_latency,
|
||||||
last_seen = excluded.last_seen
|
last_seen = excluded.last_seen,
|
||||||
''', (proxy_key, ip, port, proto, now_int, now_int, latency, now_int))
|
success_count = COALESCE(success_count, 0) + 1,
|
||||||
|
consecutive_success = COALESCE(consecutive_success, 0) + 1,
|
||||||
|
last_check = excluded.last_check,
|
||||||
|
last_target = excluded.last_target
|
||||||
|
''', (proxy_key, ip, port, proto, now_int, now_int, latency, now_int,
|
||||||
|
checktype, target))
|
||||||
|
|
||||||
# Geolocate if IP2Location available
|
# Geolocate if IP2Location available
|
||||||
if _geolite and _geodb:
|
if _geolite and _geodb:
|
||||||
@@ -1442,9 +1450,10 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||||||
proto = params.get('proto', '')
|
proto = params.get('proto', '')
|
||||||
country = params.get('country', '')
|
country = params.get('country', '')
|
||||||
asn = params.get('asn', '')
|
asn = params.get('asn', '')
|
||||||
|
mitm_filter = params.get('mitm', '')
|
||||||
fmt = params.get('format', 'json')
|
fmt = params.get('format', 'json')
|
||||||
|
|
||||||
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL'
|
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
|
||||||
args = []
|
args = []
|
||||||
|
|
||||||
if proto:
|
if proto:
|
||||||
@@ -1456,6 +1465,10 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||||||
if asn:
|
if asn:
|
||||||
sql += ' AND asn=?'
|
sql += ' AND asn=?'
|
||||||
args.append(int(asn))
|
args.append(int(asn))
|
||||||
|
if mitm_filter == '0':
|
||||||
|
sql += ' AND mitm=0'
|
||||||
|
elif mitm_filter == '1':
|
||||||
|
sql += ' AND mitm=1'
|
||||||
|
|
||||||
sql += ' ORDER BY avg_latency ASC, tested DESC LIMIT ?'
|
sql += ' ORDER BY avg_latency ASC, tested DESC LIMIT ?'
|
||||||
args.append(limit)
|
args.append(limit)
|
||||||
@@ -1487,9 +1500,10 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||||||
proto = params.get('proto', '')
|
proto = params.get('proto', '')
|
||||||
country = params.get('country', '')
|
country = params.get('country', '')
|
||||||
asn = params.get('asn', '')
|
asn = params.get('asn', '')
|
||||||
|
mitm_filter = params.get('mitm', '')
|
||||||
fmt = params.get('format', 'json')
|
fmt = params.get('format', 'json')
|
||||||
|
|
||||||
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL'
|
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
|
||||||
args = []
|
args = []
|
||||||
|
|
||||||
if proto:
|
if proto:
|
||||||
@@ -1501,6 +1515,10 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
|||||||
if asn:
|
if asn:
|
||||||
sql += ' AND asn=?'
|
sql += ' AND asn=?'
|
||||||
args.append(int(asn))
|
args.append(int(asn))
|
||||||
|
if mitm_filter == '0':
|
||||||
|
sql += ' AND mitm=0'
|
||||||
|
elif mitm_filter == '1':
|
||||||
|
sql += ' AND mitm=1'
|
||||||
|
|
||||||
sql += ' ORDER BY avg_latency ASC, tested DESC'
|
sql += ' ORDER BY avg_latency ASC, tested DESC'
|
||||||
|
|
||||||
@@ -1817,9 +1835,10 @@ class ProxyAPIServer(threading.Thread):
|
|||||||
proto = query_params.get('proto', '')
|
proto = query_params.get('proto', '')
|
||||||
country = query_params.get('country', '')
|
country = query_params.get('country', '')
|
||||||
asn = query_params.get('asn', '')
|
asn = query_params.get('asn', '')
|
||||||
|
mitm_filter = query_params.get('mitm', '')
|
||||||
fmt = query_params.get('format', 'json')
|
fmt = query_params.get('format', 'json')
|
||||||
|
|
||||||
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL'
|
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
|
||||||
args = []
|
args = []
|
||||||
if proto:
|
if proto:
|
||||||
sql += ' AND proto=?'
|
sql += ' AND proto=?'
|
||||||
@@ -1830,6 +1849,10 @@ class ProxyAPIServer(threading.Thread):
|
|||||||
if asn:
|
if asn:
|
||||||
sql += ' AND asn=?'
|
sql += ' AND asn=?'
|
||||||
args.append(int(asn))
|
args.append(int(asn))
|
||||||
|
if mitm_filter == '0':
|
||||||
|
sql += ' AND mitm=0'
|
||||||
|
elif mitm_filter == '1':
|
||||||
|
sql += ' AND mitm=1'
|
||||||
sql += ' ORDER BY avg_latency ASC, tested DESC LIMIT ?'
|
sql += ' ORDER BY avg_latency ASC, tested DESC LIMIT ?'
|
||||||
args.append(limit)
|
args.append(limit)
|
||||||
|
|
||||||
@@ -1851,9 +1874,10 @@ class ProxyAPIServer(threading.Thread):
|
|||||||
proto = query_params.get('proto', '')
|
proto = query_params.get('proto', '')
|
||||||
country = query_params.get('country', '')
|
country = query_params.get('country', '')
|
||||||
asn = query_params.get('asn', '')
|
asn = query_params.get('asn', '')
|
||||||
|
mitm_filter = query_params.get('mitm', '')
|
||||||
fmt = query_params.get('format', 'json')
|
fmt = query_params.get('format', 'json')
|
||||||
|
|
||||||
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL'
|
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
|
||||||
args = []
|
args = []
|
||||||
if proto:
|
if proto:
|
||||||
sql += ' AND proto=?'
|
sql += ' AND proto=?'
|
||||||
@@ -1864,6 +1888,10 @@ class ProxyAPIServer(threading.Thread):
|
|||||||
if asn:
|
if asn:
|
||||||
sql += ' AND asn=?'
|
sql += ' AND asn=?'
|
||||||
args.append(int(asn))
|
args.append(int(asn))
|
||||||
|
if mitm_filter == '0':
|
||||||
|
sql += ' AND mitm=0'
|
||||||
|
elif mitm_filter == '1':
|
||||||
|
sql += ' AND mitm=1'
|
||||||
sql += ' ORDER BY avg_latency ASC, tested DESC'
|
sql += ' ORDER BY avg_latency ASC, tested DESC'
|
||||||
|
|
||||||
db = mysqlite.mysqlite(self.database, str)
|
db = mysqlite.mysqlite(self.database, str)
|
||||||
@@ -1881,8 +1909,14 @@ class ProxyAPIServer(threading.Thread):
|
|||||||
return json.dumps({'error': str(e)}), 'application/json', 500
|
return json.dumps({'error': str(e)}), 'application/json', 500
|
||||||
elif path == '/proxies/count':
|
elif path == '/proxies/count':
|
||||||
try:
|
try:
|
||||||
|
mitm_filter = query_params.get('mitm', '')
|
||||||
|
sql = 'SELECT COUNT(*) FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
|
||||||
|
if mitm_filter == '0':
|
||||||
|
sql += ' AND mitm=0'
|
||||||
|
elif mitm_filter == '1':
|
||||||
|
sql += ' AND mitm=1'
|
||||||
db = mysqlite.mysqlite(self.database, str)
|
db = mysqlite.mysqlite(self.database, str)
|
||||||
row = db.execute('SELECT COUNT(*) FROM proxylist WHERE failed=0 AND proto IS NOT NULL').fetchone()
|
row = db.execute(sql).fetchone()
|
||||||
return json.dumps({'count': row[0] if row else 0}), 'application/json', 200
|
return json.dumps({'count': row[0] if row else 0}), 'application/json', 200
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return json.dumps({'error': str(e)}), 'application/json', 500
|
return json.dumps({'error': str(e)}), 'application/json', 500
|
||||||
|
|||||||
Reference in New Issue
Block a user