httpd: add stats export endpoint with CSV/JSON support
This commit is contained in:
56
httpd.py
56
httpd.py
@@ -724,6 +724,17 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
def send_js(self, js, status=200):
    """Serve *js* as a JavaScript response (UTF-8), defaulting to HTTP 200."""
    content_type = 'application/javascript; charset=utf-8'
    self.send_response_body(js, content_type, status)
|
||||
|
||||
def send_download(self, body, content_type, filename):
    """Send a 200 response with Content-Disposition so browsers save it.

    body: text or bytes payload; text is encoded as UTF-8 first so the
        Content-Length header matches the byte count actually written
        (len() of a text string with non-ASCII characters would not).
    content_type: value for the Content-Type header.
    filename: suggested download name (internally generated by callers;
        not derived from client input).
    """
    if not isinstance(body, bytes):
        # Encode once up front: Content-Length and wfile.write both need
        # bytes, and len(text) != len(utf-8 bytes) for non-ASCII stats.
        body = body.encode('utf-8')
    self.send_response(200)
    self.send_header('Content-Type', content_type)
    self.send_header('Content-Length', str(len(body)))
    self.send_header('Content-Disposition', 'attachment; filename="%s"' % filename)
    # Same hardening headers as every other response from this handler.
    for header, value in get_security_headers(content_type):
        self.send_header(header, value)
    self.end_headers()
    self.wfile.write(body)
|
||||
|
||||
def do_GET(self):
|
||||
# Rate limiting check
|
||||
client_ip = self.client_address[0] if self.client_address else ''
|
||||
@@ -745,6 +756,7 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
'/static/style.css': self.handle_css,
|
||||
'/static/dashboard.js': self.handle_js,
|
||||
'/api/stats': self.handle_stats,
|
||||
'/api/stats/export': self.handle_stats_export,
|
||||
'/api/countries': self.handle_countries,
|
||||
'/proxies': self.handle_proxies,
|
||||
'/proxies/count': self.handle_count,
|
||||
@@ -761,6 +773,7 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
'endpoints': {
|
||||
'/dashboard': 'web dashboard (HTML)',
|
||||
'/api/stats': 'runtime statistics (JSON)',
|
||||
'/api/stats/export': 'export stats (params: format=json|csv)',
|
||||
'/proxies': 'list working proxies (params: limit, proto, country, asn)',
|
||||
'/proxies/count': 'count working proxies',
|
||||
'/health': 'health check',
|
||||
@@ -851,6 +864,49 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
|
||||
|
||||
self.send_json(stats)
|
||||
|
||||
def handle_stats_export(self):
    """Export runtime stats as a downloadable JSON or CSV file.

    Query parameters: format=json|csv (anything other than 'csv' falls
    back to JSON). The filename carries a timestamp so repeated exports
    do not overwrite each other.
    """
    params = self._parse_query_params()
    fmt = params.get('format', 'json').lower()
    timestamp = time.strftime('%Y%m%d_%H%M%S')

    # Gather stats. The provider is best-effort: a failing provider must
    # not break the export, so its errors are deliberately swallowed.
    stats = {}
    if self.stats_provider:
        try:
            stats.update(self.stats_provider())
        except Exception:
            pass
    stats['system'] = get_system_stats()
    stats['exported_at'] = time.strftime('%Y-%m-%d %H:%M:%S')

    if fmt == 'csv':
        body = self._stats_to_csv(stats)
        self.send_download(body, 'text/csv', 'ppf_stats_%s.csv' % timestamp)
    else:
        body = json.dumps(stats, indent=2)
        self.send_download(body, 'application/json', 'ppf_stats_%s.json' % timestamp)

def _parse_query_params(self):
    """Parse self.path's query string into a flat {key: value} dict.

    Last occurrence of a repeated key wins; pairs without '=' are
    ignored. No percent-decoding is done (callers only read short
    ASCII tokens such as format=csv).
    """
    params = {}
    if '?' in self.path:
        for pair in self.path.split('?')[1].split('&'):
            if '=' in pair:
                key, value = pair.split('=', 1)
                params[key] = value
    return params

def _stats_to_csv(self, stats):
    """Flatten a nested stats dict into two-column 'key,value' CSV text.

    Nested dict keys are joined with '.', list/tuple elements by index.
    Both columns are double-quoted with embedded quotes doubled (RFC
    4180), so keys or values containing commas, quotes or newlines
    remain well-formed. (The original escaped only the value column,
    producing malformed CSV for keys containing '"'.)
    """
    lines = ['key,value']

    def flatten(obj, prefix=''):
        if isinstance(obj, dict):
            # Sort for a deterministic row order across exports.
            for k, v in sorted(obj.items()):
                flatten(v, '%s%s.' % (prefix, k))
        elif isinstance(obj, (list, tuple)):
            for i, v in enumerate(obj):
                flatten(v, '%s%d.' % (prefix, i))
        else:
            key = prefix.rstrip('.').replace('"', '""')
            val = str(obj).replace('"', '""')
            lines.append('"%s","%s"' % (key, val))

    flatten(stats)
    return '\n'.join(lines)
|
||||
|
||||
def handle_proxies(self):
|
||||
params = {}
|
||||
if '?' in self.path:
|
||||
|
||||
Reference in New Issue
Block a user