add and use combining class
config.ini
@@ -1,10 +1,10 @@
 [global]
 tor_host = 127.0.0.1:9050
 database = proxylist.sqlite
-proxy_max_fail = 5
+
 [watcherd]
 proxy_file = false
+max_fail = 5
 threads = 10
 timeout = 15
 submit_after = 200
-
config.py (34 changed lines)
@@ -2,6 +2,9 @@ from ConfigParser import SafeConfigParser
 
 _loaded = False
 
+class phantom():
+    def __init__(self): pass
+
 def load():
     if _loaded: return
     global database, maxfail, search, torhosts, watchd_threads, checktime, timeout, read_timeout, submit_after, use_ssl, url_checktime, url_perfail_checktime
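Note: the empty phantom class is the "combining class" of the commit title. Its instances are plain attribute bags, so related settings can be grouped under a single name instead of a pile of module-level globals. A minimal sketch of the pattern, with made-up values:

    class phantom():
        def __init__(self): pass

    _watchd = phantom()      # one bag per daemon
    _watchd.threads = 10     # attributes spring into existence on assignment
    _watchd.use_ssl = False
    print(_watchd.threads)   # -> 10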
@@ -11,27 +14,34 @@ def load():
     parser.read('config.ini')
 
     database = parser.get('global', 'database')
-    maxfail = parser.getint('global', 'proxy_max_fail')
 
-    search = parser.getboolean('proxyfind', 'search')
+
+    #maxfail = parser.getint('global', 'proxy_max_fail')
     torhosts = [ str(i).strip() for i in parser.get('global', 'tor_host').split(',') ]
-    watchd_threads = parser.getint('watcherd', 'threads')
-    timeout = parser.getint('watcherd', 'timeout')
-    submit_after = parser.getint('watcherd', 'submit_after')
-    use_ssl = parser.getboolean('watcherd', 'use_ssl')
+
+    global _watchd
+    _watchd = phantom()
+    _watchd.threads = parser.getint('watcherd', 'threads')
+    _watchd.timeout = parser.getint('watcherd', 'timeout')
+    _watchd.submit_after = parser.getint('watcherd', 'submit_after')
+    _watchd.use_ssl = parser.getboolean('watcherd', 'use_ssl')
+    _watchd.debug = parser.getboolean('watcherd', 'debug')
+    _watchd.maxfail = parser.getint('watcherd', 'max_fail')
+
+    global _leechd
+    _leechd = phantom()
+    _leechd.checktime = parser.get('proxyfind', 'checktime')
+    _leechd.perfail_checktime = parser.get('proxyfind', 'perfail_checktime')
+    _leechd.search = parser.getboolean('proxyfind', 'search')
 
-    global watchd_debug
-    watchd_debug = parser.getboolean('watcherd', 'debug')
-    url_checktime = parser.get('proxyfind', 'checktime')
-    url_perfail_checktime = parser.get('proxyfind', 'perfail_checktime')
 
     # allow overriding select items from the commandline
     import argparse
     aparse = argparse.ArgumentParser()
-    aparse.add_argument('--watchd_threads', help="how many proxy checker threads to spin up, 0==none, default: 10", type=int, default=watchd_threads, required=False)
+    aparse.add_argument('--watchd_threads', help="how many proxy checker threads to spin up, 0==none, default: 10", type=int, default=_watchd.threads, required=False)
     args = aparse.parse_args()
 
-    watchd_threads = args.watchd_threads
+    _watchd.threads = args.watchd_threads
 
     global servers
     with open('servers.txt', 'r') as handle:
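Note: with load() in place, callers read grouped settings off the bags, and --watchd_threads still overrides the INI value. A rough usage sketch (values assumed; Python 2 era, as implied by the ConfigParser import above):

    import config
    config.load()                  # parse config.ini, then apply argparse overrides
    print(config._watchd.threads)  # INI [watcherd] threads, or --watchd_threads
    print(config._leechd.search)   # replaces the old config.search global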
ppf.py (8 changed lines)
@@ -162,7 +162,7 @@ if __name__ == '__main__':
         sqlite.commit()
         import_from_file('import.txt', sqlite)
 
-    if config.search:
+    if config._leechd.search:
         ## load search terms
         with open('search_terms.txt', 'r') as f:
             search_terms = [ i.strip() for i in f.read().split('\n') if len(i.strip()) ]
@@ -173,7 +173,7 @@ if __name__ == '__main__':
     empty = [ urignore.append(i.split('/')[2]) for i in searx_instances ]
 
     # start proxy watcher
-    if config.watchd_threads > 0:
+    if config._watchd.threads > 0:
         watcherd = proxywatchd.Proxywatchd()
         watcherd.start()
     else:
@@ -183,11 +183,11 @@ if __name__ == '__main__':
     while True:
         try:
             ## any site that needs to be checked ?
-            rows = [ [i[0],i[1],i[2]] for i in sqlite.execute('SELECT url,hash,error FROM uris WHERE (check_time+?+(error*?) <?) ORDER BY RANDOM() LIMIT 25', (config.url_checktime, config.url_perfail_checktime, time.time())).fetchall() ]
+            rows = [ [i[0],i[1],i[2]] for i in sqlite.execute('SELECT url,hash,error FROM uris WHERE (check_time+?+(error*?) <?) ORDER BY RANDOM() LIMIT 25', (config._leechd.checktime, config._leechd.perfail_checktime, time.time())).fetchall() ]
 
             if len(rows): proxyleech(sqlite,rows)
             ## search for new website during free time
-            elif config.search: proxyfind(sqlite)
+            elif config._leechd.search: proxyfind(sqlite)
             ## sleep
             else: time.sleep(10)
 
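Note: the SELECT in the last hunk encodes a linear retry backoff: a URL is due once check_time + checktime + error * perfail_checktime < now, so every recorded failure delays the next check by another perfail_checktime. A worked example with assumed values:

    # assumed: checktime = 3600, perfail_checktime = 1800 (from the [proxyfind] section)
    def next_due(check_time, error, checktime=3600, perfail_checktime=1800):
        # mirrors the query's: check_time + ? + (error * ?) < now
        return check_time + checktime + error * perfail_checktime

    next_due(0, 0)   # -> 3600: clean URL, recheck after an hour
    next_due(0, 2)   # -> 7200: two failures push it out by another hour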
proxywatchd.py
@@ -26,7 +26,7 @@ class WorkerJob():
     def connect_socket(self):
         srv = random.choice(config.servers).strip()
         protos = ['http', 'socks5', 'socks4'] if self.proto is None else [self.proto]
-        server_port = 6697 if config.use_ssl else 6667
+        server_port = 6697 if config._watchd.use_ssl else 6667
 
         fail_inc = 1
 
@@ -39,12 +39,12 @@ class WorkerJob():
             ]
 
             try:
-                sock = rocksock.Rocksock(host=srv, port=server_port, ssl=config.use_ssl, proxies=proxies, timeout=config.timeout)
+                sock = rocksock.Rocksock(host=srv, port=server_port, ssl=config._watchd.use_ssl, proxies=proxies, timeout=config._watchd.timeout)
                 sock.connect()
                 sock.send('%s\n' % random.choice(['NICK', 'USER', 'JOIN', 'MODE', 'PART', 'INVITE', 'KNOCK', 'WHOIS', 'WHO', 'NOTICE', 'PRIVMSG', 'PING', 'QUIT']))
                 return sock, proto, duration, torhost, srv, 0
             except rocksock.RocksockException as e:
-                if config.watchd_debug:
+                if config._watchd.debug:
                     _log("proxy failed: %s://%s: %s"%(proto, self.proxy, e.get_errormessage()), 'debug')
 
                 et = e.get_errortype()
@@ -203,14 +203,14 @@ class Proxywatchd():
             self.mysqlite.commit()
             self._close_db()
 
-        self.submit_after = config.submit_after # number of collected jobs before writing db
+        self.submit_after = config._watchd.submit_after # number of collected jobs before writing db
        self.jobs = []
         self.collected = []
 
     def prepare_jobs(self):
         self._prep_db()
         q = 'SELECT proxy,proto,failed,success_count,total_duration FROM proxylist WHERE failed<? and tested<? ORDER BY RANDOM()' # ' LIMIT ?'
-        rows = self.mysqlite.execute(q, (config.maxfail, time.time())).fetchall()
+        rows = self.mysqlite.execute(q, (config._watchd.maxfail, time.time())).fetchall()
         for row in rows:
             job = WorkerJob(row[0], row[1], row[2], row[3], row[4])
             self.jobs.append(job)
@@ -256,7 +256,7 @@ class Proxywatchd():
         return ret
 
     def start(self):
-        if config.watchd_threads == 1 and _run_standalone:
+        if config._watchd.threads == 1 and _run_standalone:
             return self._run()
         else:
             return self._run_background()
@@ -273,7 +273,7 @@ class Proxywatchd():
     def _run(self):
         _log('starting...', 'watchd')
 
-        for i in range(config.watchd_threads):
+        for i in range(config._watchd.threads):
             threadid = ''.join( [ random.choice(string.letters) for x in range(5) ] )
             wt = WorkerThread(threadid)
             if self.in_background:
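Note: taken together, the watcher now reads everything from the _watchd bag. A sketch of the resulting call sequence, mirroring the ppf.py hunks above:

    import config, proxywatchd

    config.load()
    if config._watchd.threads > 0:
        watcherd = proxywatchd.Proxywatchd()
        watcherd.start()  # runs _run() inline when threads == 1 and
                          # _run_standalone is set, else _run_background()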