proxywatchd: make checktime constants configurable
This requires only saving the last checked time in `tested`. You can run the following SQL statement to update the existing values in the database: sqlite3 proxylist.sqlite \ "update proxylist set tested=tested-(1800+(failed*3600)) where failed < 6"
This commit is contained in:
@@ -9,6 +9,8 @@ timeout = 15
|
||||
submit_after = 200
|
||||
use_ssl = false
|
||||
debug = false
|
||||
checktime = 3600
|
||||
perfail_checktime = 3600
|
||||
|
||||
[ppf]
|
||||
search = true
|
||||
|
||||
@@ -17,6 +17,8 @@ class Config(ComboParser):
|
||||
self.add_item('watchd', 'submit_after', int, 200, 'min. number of tested proxies for DB write', False)
|
||||
self.add_item('watchd', 'debug', bool, False, 'whether to print additional debug info', False)
|
||||
self.add_item('watchd', 'use_ssl', bool, False, 'whether to use SSL and port 6697 to connect to targets (slower)', False)
|
||||
self.add_item('watchd', 'checktime', int, 1800, 'base checking interval for proxies in db in seconds', False)
|
||||
self.add_item('watchd', 'perfail_checktime', int, 3600, 'additional checking interval for proxies in db in seconds per experienced failure', False)
|
||||
|
||||
self.add_item('ppf', 'search', bool, True, 'whether to use searx search engine to find new proxy lists', False)
|
||||
self.add_item('ppf', 'timeout', float, 15, 'timeout for blocking operations (connect/recv/...) for proxy checks in seconds', False)
|
||||
|
||||
@@ -20,7 +20,7 @@ class WorkerJob():
|
||||
self.proxy = proxy
|
||||
self.proto = proto
|
||||
self.failcount = failcount
|
||||
self.nextcheck = None
|
||||
self.checktime = None
|
||||
self.success_count = success_count
|
||||
self.total_duration = total_duration
|
||||
|
||||
@@ -103,7 +103,7 @@ class WorkerJob():
|
||||
return None, None, None, None, None, fail_inc
|
||||
|
||||
def run(self):
|
||||
self.nextcheck = (time.time() + 1800 + ((1+int(self.failcount)) * 3600))
|
||||
self.checktime = int(time.time())
|
||||
|
||||
sock, proto, duration, tor, srv, failinc = self.connect_socket()
|
||||
if not sock:
|
||||
@@ -115,7 +115,6 @@ class WorkerJob():
|
||||
# good data
|
||||
if re.match('^(:|ERROR|PING|PONG|NOTICE|\*\*\*)', recv, re.IGNORECASE):
|
||||
duration = (time.time() - duration)
|
||||
self.nextcheck = (time.time() + 1800)
|
||||
|
||||
#match = geolite2.lookup(proxy[0].split(':')[0])
|
||||
match = None
|
||||
@@ -241,8 +240,8 @@ class Proxywatchd():
|
||||
|
||||
def prepare_jobs(self):
|
||||
self._prep_db()
|
||||
q = 'SELECT proxy,proto,failed,success_count,total_duration FROM proxylist WHERE failed<? and tested<? ORDER BY RANDOM()' # ' LIMIT ?'
|
||||
rows = self.mysqlite.execute(q, (config.watchd.max_fail, time.time())).fetchall()
|
||||
q = 'SELECT proxy,proto,failed,success_count,total_duration FROM proxylist WHERE failed < ? and (tested + ? + (failed * ?)) < ? ORDER BY RANDOM()' # ' LIMIT ?'
|
||||
rows = self.mysqlite.execute(q, (config.watchd.max_fail, config.watchd.checktime, config.watchd.perfail_checktime , time.time())).fetchall()
|
||||
for row in rows:
|
||||
job = WorkerJob(row[0], row[1], row[2], row[3], row[4])
|
||||
self.jobs.append(job)
|
||||
@@ -265,7 +264,7 @@ class Proxywatchd():
|
||||
args = []
|
||||
for job in self.collected:
|
||||
if job.failcount == 0: sc += 1
|
||||
args.append( (job.failcount, job.nextcheck, 1, 'unknown', job.proto, job.success_count, job.total_duration, job.proxy) )
|
||||
args.append( (job.failcount, job.checktime, 1, 'unknown', job.proto, job.success_count, job.total_duration, job.proxy) )
|
||||
|
||||
success_rate = (float(sc) / len(self.collected)) * 100
|
||||
ret = True
|
||||
@@ -275,7 +274,7 @@ class Proxywatchd():
|
||||
args = []
|
||||
for job in self.collected:
|
||||
if job.failcount == 0:
|
||||
args.append( (job.failcount, job.nextcheck, 1, 'unknown', job.proto, job.success_count, job.total_duration, job.proxy) )
|
||||
args.append( (job.failcount, job.checktime, 1, 'unknown', job.proto, job.success_count, job.total_duration, job.proxy) )
|
||||
ret = False
|
||||
|
||||
_log("updating %d DB entries (success rate: %.2f%%)"%(len(self.collected), success_rate), 'watchd')
|
||||
|
||||
Reference in New Issue
Block a user