diff --git a/comboparse.py b/comboparse.py
new file mode 100644
index 0000000..911b882
--- /dev/null
+++ b/comboparse.py
@@ -0,0 +1,86 @@
+from ConfigParser import SafeConfigParser, NoOptionError
+from argparse import ArgumentParser
+import sys
+
+class _Dummy():
+	pass
+
+class ComboParser(object):
+	def __init__(self, ini):
+		self.items = []
+		self.cparser = SafeConfigParser()
+		self.aparser = ArgumentParser()
+		self.ini = ini
+		self.items = []
+		self.loaded = False
+
+	def add_item(self, section, name, type, default, desc, required):
+		self.items.append({
+			'section':section,
+			'name':name,
+			'type':type,
+			'default':default,
+			'required':required,
+		})
+		self.aparser.add_argument(
+			'--%s.%s'%(section, name),
+			help='%s, default: (%s)'%(desc, str(default)),
+			type=type,
+			default=None,
+			required=False
+		)
+	def load(self):
+		if self.loaded: return
+		self.loaded = True
+
+		try: self.cparser.read(self.ini)
+		except: pass
+		args = self.aparser.parse_args()
+		for item in self.items:
+			try:
+				obj = getattr(self, item['section'])
+			except AttributeError:
+				setattr(self, item['section'], _Dummy())
+				obj = getattr(self, item['section'])
+
+			setattr(obj, item['name'], item['default'])
+			inner = getattr(obj, item['name'])
+
+			item['found'] = True
+			try:
+				if item['type'] is bool : inner = self.cparser.getboolean(item['section'], item['name'])
+				elif item['type'] is float: inner = self.cparser.getfloat(item['section'], item['name'])
+				elif item['type'] is int : inner = self.cparser.getint(item['section'], item['name'])
+				elif item['type'] is str : inner = self.cparser.get(item['section'], item['name'])
+			except NoOptionError:
+				item['found'] = False
+			try:
+				arg = getattr(args, '%s.%s'%(item['section'], item['name']))
+				if arg is not None:
+					inner = arg
+					item['found'] = True
+			except: pass
+			if not item['found']:
+				if item['required']:
+					sys.stderr.write('error: required config item "%s" not found in section "%s" of "%s"!\n'%(item['name'], item['section'], self.ini))
+					sys.exit(1)
+				else:
+					sys.stderr.write('warning: assigned default value of "%s" to "%s.%s"\n'%(str(item['default']), item['section'], item['name']))
+			setattr(obj, item['name'], inner)
+
+
+# TEST CODE
+def _main():
+	config = ComboParser('config.ini')
+	config.add_item('watchd', 'debug', bool, False, 'turn additional debug info on', False)
+	config.add_item('watchd', 'float', float, 0.1, 'a float test', True)
+	config.add_item('watchd', 'strupp', str, "sup", 'a str test', False)
+	config.add_item('common', 'tor_host', str, '127.0.0.1:9050', 'address of tor proxy', True)
+	config.load()
+	print config.watchd.debug
+	print config.watchd.float
+	print config.watchd.strupp
+	print config.common.tor_host
+
+if __name__ == '__main__':
+	_main()
diff --git a/config.ini.sample b/config.ini.sample
index f3a4bff..459837c 100644
--- a/config.ini.sample
+++ b/config.ini.sample
@@ -1,9 +1,8 @@
-[global]
-tor_host = 127.0.0.1:9050
+[common]
+tor_hosts = 127.0.0.1:9050
 database = proxylist.sqlite
 
-[watcherd]
-proxy_file = false
+[watchd]
 max_fail = 5
 threads = 10
 timeout = 15
@@ -11,10 +10,10 @@ submit_after = 200
 use_ssl = false
 debug = false
 
-[proxyfind]
+[ppf]
 search = true
 timeout = 30
-threads = 3
+http_retries = 1
 checktime = 3600
 perfail_checktime = 3600
 
diff --git a/config.py b/config.py
index 32d7102..c632d70 100644
--- a/config.py
+++ b/config.py
@@ -1,48 +1,25 @@
-from ConfigParser import SafeConfigParser
+from comboparse import ComboParser
 
-_loaded = False
+class Config(ComboParser):
+	def load(self):
+		super(Config, self).load()
+		self.torhosts = [ str(i).strip() for i in self.common.tor_hosts.split(',') ]
+		with open('servers.txt', 'r') as handle:
+			self.servers = [x.strip() for x in handle.readlines() if len(x.strip()) > 0]
+	def __init__(self):
+		super(Config, self).__init__('config.ini')
+		self.add_item('common', 'tor_hosts', str, '127.0.0.1:9050', 'comma-separated list of tor proxy address(es)', True)
+		self.add_item('common', 'database', str, 'proxylist.sqlite', 'filename of database', True)
 
-class phantom():
-	def __init__(self): pass
+		self.add_item('watchd', 'max_fail', int, 5, 'number of fails after which a proxy is considered dead', False)
+		self.add_item('watchd', 'threads', int, 10, 'number of threads watchd uses to check proxies', True)
+		self.add_item('watchd', 'timeout', int, 15, 'timeout for blocking operations (connect/recv/...) for proxy checks in seconds', False)
+		self.add_item('watchd', 'submit_after', int, 200, 'min. number of tested proxies for DB write', False)
+		self.add_item('watchd', 'debug', bool, False, 'whether to print additional debug info', False)
+		self.add_item('watchd', 'use_ssl', bool, False, 'whether to use SSL and port 6697 to connect to targets (slower)', False)
 
-def load():
-	if _loaded: return
-	global database, maxfail, search, torhosts, watchd_threads, checktime, timeout, read_timeout, submit_after, use_ssl, url_checktime, url_perfail_checktime
-
-	## read the config files
-	parser = SafeConfigParser()
-	parser.read('config.ini')
-
-	database = parser.get('global', 'database')
-	#maxfail = parser.getint('global', 'proxy_max_fail')
-	torhosts = [ str(i).strip() for i in parser.get('global', 'tor_host').split(',') ]
-
-	global _watchd
-	_watchd = phantom()
-	_watchd.threads = parser.getint('watcherd', 'threads')
-	_watchd.timeout = parser.getint('watcherd', 'timeout')
-	_watchd.submit_after = parser.getint('watcherd', 'submit_after')
-	_watchd.use_ssl = parser.getboolean('watcherd', 'use_ssl')
-	_watchd.debug = parser.getboolean('watcherd', 'debug')
-	_watchd.maxfail = parser.getint('watcherd', 'max_fail')
-
-	global _leechd
-	_leechd = phantom()
-	_leechd.checktime = parser.get('proxyfind', 'checktime')
-	_leechd.perfail_checktime = parser.get('proxyfind', 'perfail_checktime')
-	_leechd.search = parser.getboolean('proxyfind', 'search')
-
-	global watchd_debug
-	watchd_debug = parser.getboolean('watcherd', 'debug')
-
-	# allow overriding select items from the commandline
-	import argparse
-	aparse = argparse.ArgumentParser()
-	aparse.add_argument('--watchd_threads', help="how many proxy checker threads to spin up, 0==none, default: 10", type=int, default=_watchd.threads, required=False)
-	args = aparse.parse_args()
-
-	_watchd.threads = args.watchd_threads
-
-	global servers
-	with open('servers.txt', 'r') as handle:
-		servers = [x.strip() for x in handle.readlines() if len(x.strip()) > 0]
+		self.add_item('ppf', 'search', bool, True, 'whether to use searx search engine to find new proxy lists', False)
+		self.add_item('ppf', 'timeout', float, 15, 'timeout for blocking operations (connect/recv/...) for proxy checks in seconds', False)
+		self.add_item('ppf', 'http_retries', int, 1, 'number of retries for http connects', False)
+		self.add_item('ppf', 'checktime', int, 3600, 'base checking interval for urls in db in seconds', False)
+		self.add_item('ppf', 'perfail_checktime', int, 3600, 'additional checking interval for urls in db in seconds per experienced failure', False)
diff --git a/http2.py b/http2.py
index 5db7b94..5520b37 100644
--- a/http2.py
+++ b/http2.py
@@ -75,7 +75,10 @@ def _is_textual_content_type(ct):
 	return ct in TEXTUAL_CONTENT_TYPES_LIST
 
 class RsHttp():
-	def __init__(self, host, port=80, ssl=False, follow_redirects=False, auto_set_cookies=False, keep_alive=False, timeout=60, user_agent=None, proxies=None, max_tries=10, **kwargs):
+	def __init__(self, host, port=80, ssl=False, follow_redirects=False, \
+		auto_set_cookies=False, keep_alive=False, timeout=60, \
+		user_agent=None, proxies=None, max_tries=10, log_errors=True, \
+		**kwargs):
 		self.host = host
 		self.port = port
 		self.use_ssl = ssl
@@ -88,10 +91,16 @@ class RsHttp():
 		self.proxies = proxies
 		self.cookies = dict()
 		self.max_tries = max_tries
+		self.log_errors = log_errors
+		self.last_rs_exception = None
 		self.headers = []
 
+	def get_last_rocksock_exception(self):
+		return self.last_rs_exception
+
 	def _err_log(self, s):
-		sys.stderr.write(s + '\n')
+		if self.log_errors:
+			sys.stderr.write(s + '\n')
 
 	def connect(self):
 		return self.reconnect()
@@ -242,6 +251,7 @@ class RsHttp():
 			self.conn.connect()
 			return True
 		except RocksockException as e:
+			self.last_rs_exception = e
 			if e.errortype == rocksock.RS_ET_GAI and e.error==-2:
 				# -2: Name does not resolve
 				self.conn.disconnect()
@@ -277,6 +287,7 @@ class RsHttp():
 		try:
 			return func(*args)
 		except RocksockException as e:
+			self.last_rs_exception = e
 			self.conn.disconnect()
 			if not self.reconnect(): return failret
 		except IOError:
@@ -310,6 +321,7 @@ class RsHttp():
 			self.use_ssl = use_ssl
 			self.conn.disconnect()
 			self.conn = None
+			self.reconnect()
 			return self.get(url, extras)
 
 		return hdr, res
diff --git a/ppf.py b/ppf.py
index 30c5809..8597bfe 100755
--- a/ppf.py
+++ b/ppf.py
@@ -8,10 +8,12 @@ import mysqlite
 import proxywatchd
 from misc import _log
 from soup_parser import soupify
-import config
+from config import Config
 from http2 import RsHttp, _parse_url
 import rocksock
 
+config = Config()
+
 base_header = {
 	'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
 }
@@ -32,7 +34,7 @@ def import_from_file(fn, sqlite):
 		exists = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE url=?',(u,)).fetchall() ]
 		if exists: continue
 		print('adding "%s"' % u)
-		sqlite.execute('INSERT INTO uris (added,url,check_time,error) VALUES (?,?,?,?)', (time.time(),u,0,1))
+		sqlite.execute('INSERT INTO uris (added,url,check_time,error) VALUES (?,?,?,?)', (int(time.time()),u,0,1))
 	sqlite.commit()
 
 def fetch_contents(url):
@@ -41,9 +43,25 @@
 		'Accept-Language: en-US,en;q=0.8',
 		'Cache-Control: max-age=0',
 	]
-	proxies = [rocksock.RocksockProxyFromURL('socks4://%s' % random.choice( config.torhosts ))]
-	http = RsHttp(host,ssl=ssl,port=port, keep_alive=True, timeout=15, max_tries=1, follow_redirects=True, auto_set_cookies=True, proxies=proxies, user_agent='Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0')
-	if not http.connect(): return ''
+	while True:
+		proxies = [rocksock.RocksockProxyFromURL('socks4://%s' % random.choice( config.torhosts ))]
+		http = RsHttp(host,ssl=ssl,port=port, keep_alive=True, timeout=config.ppf.timeout, max_tries=config.ppf.http_retries, follow_redirects=True, auto_set_cookies=True, proxies=proxies, user_agent='Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0')
+		if not http.connect():
+			_log("failed to connect to %s"%url, "ppf")
+			e = http.get_last_rocksock_exception()
+			if not e:
+				return ''
+			et = e.get_errortype()
+			ee = e.get_error()
+			ef = e.get_failedproxy()
+			if et == rocksock.RS_ET_OWN and \
+				ee == rocksock.RS_E_TARGET_CONN_REFUSED \
+				and ef == 0:
+				_log("could not connect to proxy 0 - check your connection", "error")
+				time.sleep(5)
+				continue
+			return ''
+		break
 	hdr, res = http.get(uri, headers)
 	res = res.encode('utf-8') if isinstance(res, unicode) else res
 	for retry_message in retry_messages:
@@ -51,6 +69,11 @@
 	return res
 
 
+def valid_port(proxy):
+	ip, port = proxy.split(':')
+	port = int(port)
+	return port > 0 and port < 65535
+
 _known_proxies = {}
 def insert_proxies(proxies, uri, sqlite, timestamp):
 	global _known_proxies
@@ -59,9 +82,12 @@
 	for k in known:
 		_known_proxies[k[0]] = True
 
-	new = [ (timestamp,i,3,0,0,0) for i in proxies if not i in _known_proxies ]
-	for i in new:
-		_known_proxies[i[1]] = True
+	new = []
+	for p in proxies:
+		if not p in _known_proxies:
+			if not valid_port(p): continue
+			new.append((timestamp,p,3,0,0,0))
+			_known_proxies[p] = True
 
 	if len(new):
 		sqlite.executemany('INSERT INTO proxylist (added,proxy,failed,tested,success_count,total_duration) VALUES (?,?,?,?,?,?)', new)
@@ -69,7 +95,7 @@
 		_log('+%d item(s) from %s' % (len(new), uri), 'added')
 
 def proxyfind(sqlite = None):
-	if not sqlite: sqlite = mysqlite.mysqlite(config.database,str)
+	if not sqlite: sqlite = mysqlite.mysqlite(config.common.database,str)
 
 	choice = random.choice(searx_instances)
 	urls = []
@@ -91,7 +117,7 @@
 	if len(urls):
 		query = [ 'url=?' for u in urls ]
 		known = [ i[0] for i in sqlite.execute('SELECT url FROM uris WHERE %s' % ' OR '.join(query),urls).fetchall() ]
-		time_now = time.time()
+		time_now = int(time.time())
 		new = [ (time_now,i,0,5,0) for i in urls if not i in known ]
 		if len(new):
 			sqlite.executemany('INSERT INTO uris (added,url,check_time,error,driver) values(?,?,?,?,?)', new)
@@ -135,13 +161,13 @@
 	else:
 		row[2] = 0
 	#check_time = (time.time() + 3600 + (3600 * row[2]))
-	sqlite.execute('UPDATE uris SET error=?,hash=?,check_time=? where url=?', (row[2],hash, time.time(),row[0]))
+	sqlite.execute('UPDATE uris SET error=?,hash=?,check_time=? where url=?', (row[2],hash, int(time.time()),row[0]))
 	sqlite.commit()
 
 	if not row[1] or row[2] > 0: return
 
 	add = []
-	time_now = time.time()
+	time_now = int(time.time())
 	for i in uniques:
 		add.append(i)
 		if len(add) > 500:
@@ -155,14 +181,14 @@ if __name__ == '__main__':
 	config.load()
 	proxies={'http':'socks4://%s' % random.choice(config.torhosts),'https':'socks4://%s' % random.choice(config.torhosts)}
 
-	sqlite = mysqlite.mysqlite(config.database, str)
+	sqlite = mysqlite.mysqlite(config.common.database, str)
 	## create dbs if required
 	sqlite.execute('CREATE TABLE IF NOT EXISTS uris (added INT, url TEXT, check_time INT, error INT, driver INT, hash TEXT)')
 	sqlite.execute('CREATE TABLE IF NOT EXISTS proxylist (proxy BLOB, country BLOB, added INT, failed INT, tested INT, dronebl INT, proto TEXT, success_count INT, total_duration INT)')
 	sqlite.commit()
 	import_from_file('import.txt', sqlite)
 
-	if config._leechd.search:
+	if config.ppf.search:
 		## load search terms
 		with open('search_terms.txt', 'r') as f:
 			search_terms = [ i.strip() for i in f.read().split('\n') if len(i.strip()) ]
@@ -173,7 +199,7 @@
 	empty = [ urignore.append(i.split('/')[2]) for i in searx_instances ]
 
 	# start proxy watcher
-	if config._watchd.threads > 0:
+	if config.watchd.threads > 0:
 		watcherd = proxywatchd.Proxywatchd()
 		watcherd.start()
 	else:
@@ -183,11 +209,11 @@
 	while True:
 		try:
 			## any site that needs to be checked ?
-			rows = [ [i[0],i[1],i[2]] for i in sqlite.execute('SELECT url,hash,error FROM uris WHERE (check_time+?+(error*?)
 65535:
+			raise RocksockException(RS_E_INVALID_PROXY_URL, failedproxy=-1)
 		self.host = host
 		self.port = port
@@ -166,6 +171,12 @@ class Rocksock():
 		self.sock = None
 		self.timeout = timeout
 
+	def _translate_socket_error(self, e, pnum):
+		fp = self._failed_proxy(pnum)
+		if e.errno == errno.ECONNREFUSED:
+			return RocksockException(RS_E_TARGET_CONN_REFUSED, failedproxy=fp)
+		return RocksockException(e.errno, errortype=RS_ET_SYS, failedproxy=fp)
+
 	def _failed_proxy(self, pnum):
 		if pnum < 0: return -1
 		if pnum >= len(self.proxychain)-1: return -1
@@ -189,7 +200,7 @@ class Rocksock():
 		except socket.timeout:
 			raise RocksockException(RS_E_HIT_TIMEOUT, failedproxy=self._failed_proxy(0))
 		except socket.error as e:
-			raise RocksockException(e.errno, errortype=RS_ET_SYS, failedproxy=self._failed_proxy(0))
+			raise self._translate_socket_error(e, 0)
 
 		for pnum in xrange(1, len(self.proxychain)):
 			curr = self.proxychain[pnum]
@@ -204,7 +215,7 @@ class Rocksock():
 			#if hasattr(e, 'library'): subsystem = e.library
 			raise RocksockException(RS_E_SSL_GENERIC, failedproxy=reason, errortype=RS_ET_SSL)
 		except socket.error as e:
-			raise RocksockException(e.errno, errortype=RS_ET_SYS)
+			raise self._translate_socket_error(e, -1)
 		except Exception as e:
 			raise e
 		"""
@@ -252,6 +263,8 @@
 			chunk = self.sock.recv(n)
 		except socket.timeout:
 			raise RocksockException(RS_E_HIT_TIMEOUT, failedproxy=self._failed_proxy(pnum))
+		except socket.error as e:
+			raise self._translate_socket_error(e, pnum)
 		except ssl.SSLError as e:
 			s = self._get_ssl_exception_reason(e)
 			if s == 'The read operation timed out':