Compare commits

...

7 Commits

Author SHA1 Message Date
Username
ff21c75a7a add .venv to .gitignore
Some checks failed
CI / syntax-check (push) Failing after 1s
CI / memory-leak-check (push) Successful in 17s
2026-02-17 21:07:16 +01:00
Username
e0e56935f2 todo: add V1 worker deprecation task 2026-02-17 21:06:35 +01:00
Username
9ecf7d89bd servers: refresh list from mirc.com (128 servers) 2026-02-17 21:06:31 +01:00
Username
ba9553f4aa httpd: add freshness filter, mitm param, and provenance to upsert
Export endpoints now require last_seen within 60 minutes. Add optional
mitm=0|1 query parameter to filter MITM proxies. Fix upsert to track
success_count, consecutive_success, last_check, and last_target.
2026-02-17 21:06:27 +01:00
Username
dfcd8f0c00 add test provenance columns and worker report fields
Add last_check/last_target columns to proxylist schema with migration.
Include checktype and target in V2 worker report payload.
2026-02-17 21:06:21 +01:00
Username
4c5f4fa01d watchd: add protocol fingerprint probes and fix nullable counters
Add lightweight SOCKS5/SOCKS4/HTTP handshake probes to identify proxy
protocol before full testing. Guard consecutive_success, success_count,
and total_duration against NoneType from worker-reported upserts.
Track last_check and last_target for test provenance.
2026-02-17 21:06:16 +01:00
Username
31bdb76a97 config: add fingerprint option for protocol probing 2026-02-17 21:06:10 +01:00
8 changed files with 272 additions and 47 deletions

1
.gitignore vendored
View File

@@ -6,4 +6,5 @@ __pycache__/
*.sqlite-shm
*.sqlite-wal
.claude/
.venv/
data/

12
TODO.md
View File

@@ -49,6 +49,18 @@ Optimize only if memory becomes a constraint.
---
## Deprecation
### [ ] Remove V1 worker protocol
- V2 workers (URL-driven) are the standard; no V1 workers remain active
- Remove `--worker` flag and V1 code path in ppf.py
- Remove `/api/claim`, `/api/submit` V1 endpoints in httpd.py
- Remove V1 heartbeat/registration handling
- Clean up any V1-specific state tracking in proxywatchd.py
---
## Known Issues
### [!] Podman Container Metadata Disappears

View File

@@ -123,6 +123,7 @@ class Config(ComboParser):
self.add_item(section, 'checktype', str, 'head', 'secondary check type: head, irc, judges, none/false (none = SSL-only)', False)
self.add_item(section, 'ssl_first', bool, True, 'try SSL handshake first, fallback to checktype on failure (default: True)', False)
self.add_item(section, 'ssl_only', bool, False, 'when ssl_first enabled, skip secondary check on SSL failure (default: False)', False)
self.add_item(section, 'fingerprint', bool, True, 'probe proxy protocol before testing (default: True)', False)
self.add_item(section, 'scale_cooldown', int, 10, 'seconds between thread scaling decisions (default: 10)', False)
self.add_item(section, 'scale_threshold', float, 10.0, 'min success rate % to scale up threads (default: 10.0)', False)

15
dbs.py
View File

@@ -98,6 +98,16 @@ def _migrate_last_seen(sqlite):
sqlite.commit()
def _migrate_last_check_columns(sqlite):
"""Add last_check and last_target columns for test provenance tracking."""
for col, typedef in (('last_check', 'TEXT'), ('last_target', 'TEXT')):
try:
sqlite.execute('SELECT %s FROM proxylist LIMIT 1' % col)
except Exception:
sqlite.execute('ALTER TABLE proxylist ADD COLUMN %s %s' % (col, typedef))
sqlite.commit()
def _migrate_uri_check_interval(sqlite):
"""Add adaptive check_interval column to uris table."""
try:
@@ -371,7 +381,9 @@ def create_table_if_not_exists(sqlite, dbname):
source_proto TEXT,
source_confidence INT DEFAULT 0,
protos_working TEXT,
last_seen INT DEFAULT 0)""")
last_seen INT DEFAULT 0,
last_check TEXT,
last_target TEXT)""")
# Migration: add columns to existing databases (must run before creating indexes)
_migrate_latency_columns(sqlite)
_migrate_anonymity_columns(sqlite)
@@ -381,6 +393,7 @@ def create_table_if_not_exists(sqlite, dbname):
_migrate_source_proto(sqlite)
_migrate_protos_working(sqlite)
_migrate_last_seen(sqlite)
_migrate_last_check_columns(sqlite)
# Indexes for common query patterns
sqlite.execute('CREATE INDEX IF NOT EXISTS idx_proxylist_failed ON proxylist(failed)')
sqlite.execute('CREATE INDEX IF NOT EXISTS idx_proxylist_tested ON proxylist(tested)')

View File

@@ -731,20 +731,28 @@ def submit_proxy_reports(db, worker_id, proxies):
proto = p.get('proto', 'http')
latency = p.get('latency', 0)
source_url = p.get('source_url')
checktype = p.get('checktype', '')
target = p.get('target', '')
try:
# Upsert: insert new proxy or update existing as working
db.execute('''
INSERT INTO proxylist (proxy, ip, port, proto, failed, tested, added,
avg_latency, last_seen)
VALUES (?, ?, ?, ?, 0, ?, ?, ?, ?)
avg_latency, last_seen, success_count,
consecutive_success, last_check, last_target)
VALUES (?, ?, ?, ?, 0, ?, ?, ?, ?, 1, 1, ?, ?)
ON CONFLICT(proxy) DO UPDATE SET
failed = 0,
tested = excluded.tested,
proto = excluded.proto,
avg_latency = excluded.avg_latency,
last_seen = excluded.last_seen
''', (proxy_key, ip, port, proto, now_int, now_int, latency, now_int))
last_seen = excluded.last_seen,
success_count = COALESCE(success_count, 0) + 1,
consecutive_success = COALESCE(consecutive_success, 0) + 1,
last_check = excluded.last_check,
last_target = excluded.last_target
''', (proxy_key, ip, port, proto, now_int, now_int, latency, now_int,
checktype, target))
# Geolocate if IP2Location available
if _geolite and _geodb:
@@ -1442,9 +1450,10 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
proto = params.get('proto', '')
country = params.get('country', '')
asn = params.get('asn', '')
mitm_filter = params.get('mitm', '')
fmt = params.get('format', 'json')
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL'
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
args = []
if proto:
@@ -1456,6 +1465,10 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
if asn:
sql += ' AND asn=?'
args.append(int(asn))
if mitm_filter == '0':
sql += ' AND mitm=0'
elif mitm_filter == '1':
sql += ' AND mitm=1'
sql += ' ORDER BY avg_latency ASC, tested DESC LIMIT ?'
args.append(limit)
@@ -1487,9 +1500,10 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
proto = params.get('proto', '')
country = params.get('country', '')
asn = params.get('asn', '')
mitm_filter = params.get('mitm', '')
fmt = params.get('format', 'json')
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL'
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
args = []
if proto:
@@ -1501,6 +1515,10 @@ class ProxyAPIHandler(BaseHTTPServer.BaseHTTPRequestHandler):
if asn:
sql += ' AND asn=?'
args.append(int(asn))
if mitm_filter == '0':
sql += ' AND mitm=0'
elif mitm_filter == '1':
sql += ' AND mitm=1'
sql += ' ORDER BY avg_latency ASC, tested DESC'
@@ -1817,9 +1835,10 @@ class ProxyAPIServer(threading.Thread):
proto = query_params.get('proto', '')
country = query_params.get('country', '')
asn = query_params.get('asn', '')
mitm_filter = query_params.get('mitm', '')
fmt = query_params.get('format', 'json')
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL'
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
args = []
if proto:
sql += ' AND proto=?'
@@ -1830,6 +1849,10 @@ class ProxyAPIServer(threading.Thread):
if asn:
sql += ' AND asn=?'
args.append(int(asn))
if mitm_filter == '0':
sql += ' AND mitm=0'
elif mitm_filter == '1':
sql += ' AND mitm=1'
sql += ' ORDER BY avg_latency ASC, tested DESC LIMIT ?'
args.append(limit)
@@ -1851,9 +1874,10 @@ class ProxyAPIServer(threading.Thread):
proto = query_params.get('proto', '')
country = query_params.get('country', '')
asn = query_params.get('asn', '')
mitm_filter = query_params.get('mitm', '')
fmt = query_params.get('format', 'json')
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL'
sql = 'SELECT ip, port, proto, country, asn, avg_latency, protos_working FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
args = []
if proto:
sql += ' AND proto=?'
@@ -1864,6 +1888,10 @@ class ProxyAPIServer(threading.Thread):
if asn:
sql += ' AND asn=?'
args.append(int(asn))
if mitm_filter == '0':
sql += ' AND mitm=0'
elif mitm_filter == '1':
sql += ' AND mitm=1'
sql += ' ORDER BY avg_latency ASC, tested DESC'
db = mysqlite.mysqlite(self.database, str)
@@ -1881,8 +1909,14 @@ class ProxyAPIServer(threading.Thread):
return json.dumps({'error': str(e)}), 'application/json', 500
elif path == '/proxies/count':
try:
mitm_filter = query_params.get('mitm', '')
sql = 'SELECT COUNT(*) FROM proxylist WHERE failed=0 AND proto IS NOT NULL AND last_seen >= strftime("%s","now") - 3600'
if mitm_filter == '0':
sql += ' AND mitm=0'
elif mitm_filter == '1':
sql += ' AND mitm=1'
db = mysqlite.mysqlite(self.database, str)
row = db.execute('SELECT COUNT(*) FROM proxylist WHERE failed=0 AND proto IS NOT NULL').fetchone()
row = db.execute(sql).fetchone()
return json.dumps({'count': row[0] if row else 0}), 'application/json', 200
except Exception as e:
return json.dumps({'error': str(e)}), 'application/json', 500

2
ppf.py
View File

@@ -1134,6 +1134,8 @@ def worker_v2_main(config):
'latency': round(latency_sec, 3),
'exit_ip': state.exit_ip,
'source_url': source_map.get(proxy_addr) or source_map.get(state.proxy, ''),
'checktype': state.last_check or '',
'target': state.last_target or '',
})
if completed % 50 == 0 or completed == len(all_jobs):

View File

@@ -299,7 +299,8 @@ class ProxyTestState(object):
'asn', 'isoldies', 'completion_queue', 'lock', 'results', 'completed',
'evaluated', 'last_latency_ms', 'exit_ip', 'reveals_headers',
'last_fail_category', 'original_failcount', 'had_ssl_test', 'ssl_success',
'cert_error', 'source_proto', 'protos_working'
'cert_error', 'source_proto', 'protos_working',
'last_check', 'last_target'
)
def __init__(self, ip, port, proto, failcount, success_count, total_duration,
@@ -343,6 +344,9 @@ class ProxyTestState(object):
# Protocol fingerprinting
self.source_proto = source_proto
self.protos_working = None
# Test provenance
self.last_check = None
self.last_target = None
def record_result(self, success, proto=None, duration=0, srv=None, tor=None, ssl=None, category=None, exit_ip=None, reveals_headers=None):
"""Record a single target test result. Thread-safe.
@@ -480,9 +484,9 @@ class ProxyTestState(object):
# and only if this test didn't detect MITM
if self.consecutive_success > 0 and (self.consecutive_success % 3) == 0 and not self.cert_error:
self.mitm = 0
self.consecutive_success += 1
self.success_count += 1
self.total_duration += int(last_good['duration'] * 1000)
self.consecutive_success = (self.consecutive_success or 0) + 1
self.success_count = (self.success_count or 0) + 1
self.total_duration = (self.total_duration or 0) + int(last_good['duration'] * 1000)
# Calculate average latency from successful tests (in ms)
durations = [s['duration'] for s in successes if s['duration']]
@@ -543,6 +547,9 @@ class TargetTestJob(object):
_log('JOB RUN #%d: %s -> %s (%s)' % (_sample_debug_counter,
self.proxy_state.proxy, self.target_srv, self.checktype), 'info')
network_stats.set_category('proxy')
# Track test provenance (overwritten on each attempt, last success wins)
self.proxy_state.last_check = self.checktype
self.proxy_state.last_target = self.target_srv
_dbg('test start: %s via %s' % (self.target_srv, self.checktype), self.proxy_state.proxy)
sock, proto, duration, tor, srv, failinc, is_ssl, err_cat = self._connect_and_test()
_dbg('connect result: sock=%s proto=%s err=%s' % (bool(sock), proto, err_cat), self.proxy_state.proxy)
@@ -682,6 +689,90 @@ class TargetTestJob(object):
protos.append(p)
return protos
def _fingerprint_protocol(self, pool):
    """Identify proxy protocol via lightweight handshake probes.

    Sends protocol-specific greeting bytes directly to the proxy
    and identifies the protocol from the response pattern.

    Returns: 'socks5', 'socks4', 'http', or None
    """
    ps = self.proxy_state
    # Cap the probe timeout so fingerprinting stays cheap compared to a full test.
    fp_timeout = min(config.watchd.timeout, 5)
    if pool:
        torhost = pool.get_tor_host(self.worker_id)
    else:
        torhost = random.choice(config.torhosts)
    # Try each handshake in order; the first positive response wins.
    for probe in (self._probe_socks5, self._probe_socks4, self._probe_http):
        detected = probe(ps, torhost, fp_timeout)
        if detected:
            _sample_dbg('fingerprint: %s detected' % detected, ps.proxy)
            return detected
    return None
def _probe_socks5(self, ps, torhost, timeout):
    """Probe for SOCKS5 protocol. Returns 'socks5' or None.

    Sends a version-5 greeting (VER=5, NMETHODS=1, METHOD=no-auth) and
    accepts any reply whose first byte echoes version 5.

    Fix: disconnect the socket in a finally block so a RocksockException
    raised by send()/recv() no longer leaks the connection.
    """
    try:
        sock = rocksock.Rocksock(
            host=ps.ip, port=int(ps.port),
            proxies=[rocksock.RocksockProxyFromURL(tor_proxy_url(torhost))],
            timeout=timeout
        )
        sock.connect()
        try:
            sock.send('\x05\x01\x00')
            res = sock.recv(2)
        finally:
            # Always release the connection, even when send/recv fails.
            sock.disconnect()
        if len(res) >= 1 and res[0] == '\x05':
            return 'socks5'
    except rocksock.RocksockException:
        pass
    except KeyboardInterrupt:
        raise
    return None
def _probe_socks4(self, ps, torhost, timeout):
    """Probe for SOCKS4 protocol. Returns 'socks4' or None.

    Sends a SOCKS4 CONNECT request for 1.1.1.1:80 and accepts a reply
    with a null version byte and a valid SOCKS4 status code
    (0x5a granted, 0x5b-0x5d rejected variants).

    Fix: disconnect the socket in a finally block so a RocksockException
    raised by send()/recv() no longer leaks the connection.
    """
    try:
        sock = rocksock.Rocksock(
            host=ps.ip, port=int(ps.port),
            proxies=[rocksock.RocksockProxyFromURL(tor_proxy_url(torhost))],
            timeout=timeout
        )
        sock.connect()
        try:
            # CONNECT 1.1.1.1:80
            sock.send('\x04\x01\x00\x50\x01\x01\x01\x01\x00')
            res = sock.recv(2)
        finally:
            # Always release the connection, even when send/recv fails.
            sock.disconnect()
        if len(res) >= 2 and ord(res[0]) == 0 and ord(res[1]) in (0x5a, 0x5b, 0x5c, 0x5d):
            return 'socks4'
    except rocksock.RocksockException:
        pass
    except KeyboardInterrupt:
        raise
    return None
def _probe_http(self, ps, torhost, timeout):
    """Probe for HTTP CONNECT protocol. Returns 'http' or None.

    Issues a CONNECT request and accepts any reply beginning with an
    HTTP status line ('HTTP/'), regardless of the status code.

    Fix: disconnect the socket in a finally block so a RocksockException
    raised by send()/recv() no longer leaks the connection.
    """
    try:
        sock = rocksock.Rocksock(
            host=ps.ip, port=int(ps.port),
            proxies=[rocksock.RocksockProxyFromURL(tor_proxy_url(torhost))],
            timeout=timeout
        )
        sock.connect()
        try:
            sock.send('CONNECT 1.1.1.1:80 HTTP/1.1\r\nHost: 1.1.1.1:80\r\n\r\n')
            res = sock.recv(13)
        finally:
            # Always release the connection, even when send/recv fails.
            sock.disconnect()
        if res.startswith('HTTP/'):
            return 'http'
    except rocksock.RocksockException:
        pass
    except KeyboardInterrupt:
        raise
    return None
def _connect_and_test(self):
"""Connect to target through the proxy and send test packet.
@@ -702,6 +793,12 @@ class TargetTestJob(object):
protos = self._build_proto_order()
pool = connection_pool.get_pool()
# Fingerprint unknown proxies to avoid brute-force protocol guessing
if ps.proto is None and config.watchd.fingerprint:
detected = self._fingerprint_protocol(pool)
if detected:
protos = [detected] + [p for p in protos if p != detected]
# Phase 1: SSL handshake (if ssl_first enabled or SSL-only mode)
if config.watchd.ssl_first or self.checktype == 'none':
result = self._try_ssl_handshake(protos, pool)
@@ -1519,7 +1616,8 @@ class Proxywatchd():
dead_count += 1
args.append((effective_failcount, job.checktime, 1, job.country, job.proto,
job.success_count, job.total_duration, job.mitm,
job.consecutive_success, job.asn, job.protos_working, job.proxy))
job.consecutive_success, job.asn, job.protos_working,
job.last_check, job.last_target, job.proxy))
success_rate = (float(sc) / len(self.collected)) * 100
ret = True
@@ -1533,7 +1631,8 @@ class Proxywatchd():
if job.failcount == 0:
args.append((job.failcount, job.checktime, 1, job.country, job.proto,
job.success_count, job.total_duration, job.mitm,
job.consecutive_success, job.asn, job.protos_working, job.proxy))
job.consecutive_success, job.asn, job.protos_working,
job.last_check, job.last_target, job.proxy))
if job.last_latency_ms is not None:
latency_updates.append((job.proxy, job.last_latency_ms))
ret = False
@@ -1550,7 +1649,7 @@ class Proxywatchd():
if job.failcount == 0 and job.exit_ip]
with self._db_context() as db:
query = 'UPDATE proxylist SET failed=?,tested=?,dronebl=?,country=?,proto=?,success_count=?,total_duration=?,mitm=?,consecutive_success=?,asn=?,protos_working=? WHERE proxy=?'
query = 'UPDATE proxylist SET failed=?,tested=?,dronebl=?,country=?,proto=?,success_count=?,total_duration=?,mitm=?,consecutive_success=?,asn=?,protos_working=?,last_check=?,last_target=? WHERE proxy=?'
db.executemany(query, args)
# Batch update latency metrics for successful proxies

View File

@@ -1,69 +1,132 @@
irc.2600.net
irc.Undernet.Org
irc.abjects.net
irc.afternet.org
irc.allnetwork.org
irc.alphachat.net
irc.atrum.org
irc.austnet.org
irc.axon.pw
irc.ayochat.or.id
irc.azzurra.chat
irc.beyondirc.net
irc.bolchat.com
irc.brasirc.com.br
irc.canternet.org
irc.chat4all.org
irc.chatspike.net
irc.choopa.net
irc.coldfront.net
irc.cyberarmy.net
irc.chatzona.org
irc.cncirc.net
irc.coolsmile.net
irc.d-t-net.de
irc.dal.net
irc.darenet.org
irc.darkfasel.net
irc.darkmyst.org
irc.darkscience.net
irc.darkworld.network
irc.data.lt
irc.drlnet.com
irc.dynastynet.net
irc.dejatoons.net
irc.desirenet.org
irc.ecnet.org
irc.efnet.org
irc.efnet.pl
irc.enterthegame.com
irc.epiknet.org
irc.esper.net
irc.eu.dal.net
irc.eu.gamesurge.net
irc.euirc.net
irc.europnet.org
irc.eversible.com
irc.evolu.net
irc.explosionirc.net
irc.fdfnet.net
irc.fef.net
irc.financialchat.com
irc.forestnet.org
irc.freeunibg.eu
irc.gamesurge.net
irc.geeknode.org
irc.geekshed.net
irc.german-freakz.net
irc.german-elite.net
irc.gigairc.net
irc.gimp.org
irc.globalgamers.net
irc.greekirc.net
irc.goodchatting.com
irc.hackint.org
irc.hybridirc.com
irc.icq-chat.com
irc.immortal-anime.net
irc.indymedia.org
irc.irc-hispano.org
irc.irc2.hu
irc.irc4fun.net
irc.ircgate.it
irc.irchighway.net
irc.ircsource.net
irc.irctoo.net
irc.ircube.org
irc.ircworld.org
irc.irdsi.net
irc.kampungchat.org
irc.knightirc.net
irc.krey.net
irc.krono.net
irc.langochat.net
irc.krstarica.com
irc.libera.chat
irc.librairc.net
irc.lichtsnel.nl
irc.link-net.be
irc.lt-tech.org
irc.luatic.net
irc.maddshark.net
irc.newnet.net
irc.magicstar.net
irc.mibbit.net
irc.mindforge.org
irc.nationchat.org
irc.nightstar.net
irc.nullirc.net
irc.oftc.net
irc.onlinegamesnet.net
irc.othernet.org
irc.otherworlders.org
irc.oltreirc.net
irc.openjoke.org
irc.orixon.org
irc.oz.org
irc.p2pchat.net
irc.p2p-network.net
irc.perl.org
irc.phat-net.de
irc.pirc.pl
irc.ptnet.org
irc.quakenet.org
irc.recycled-irc.net
irc.retroit.org
irc.rezosup.org
irc.rizon.net
irc.rusnet.org.ru
irc.scarynet.org
irc.serenia.net
irc.scuttled.net
irc.serenity-irc.net
irc.servercentral.net
irc.shadowfire.org
irc.shadowworld.net
irc.simosnap.com
irc.skychatz.org
irc.skyrock.net
irc.slacknet.org
irc.slashnet.org
irc.snt.utwente.nl
irc.smurfnet.ch
irc.snoonet.org
irc.sorcery.net
irc.spacetronix.net
irc.spotchat.org
irc.st-city.net
irc.starlink-irc.org
irc.starlink.org
irc.staynet.org
irc.stormbit.net
irc.swiftirc.net
irc.teranova.net
irc.us.dal.net
irc.us.gamesurge.net
irc.synirc.net
irc.technet.chat
irc.tilde.chat
irc.tweakers.net
irc.undernet.org
irc.undermind.net
irc.wenet.ru
irc.whatnet.org
irc.wixchat.org
irc.worldirc.org
irc.xertion.org
irc.xevion.net
irc.zerofuzion.net
uk.quakenet.org
us.quakenet.org
open.ircnet.net
ssl.bongster.de